
src/hotspot/cpu/x86/c2_MacroAssembler_x86.cpp (old version)


  33 #include "opto/output.hpp"
  34 #include "opto/opcodes.hpp"
  35 #include "opto/subnode.hpp"
  36 #include "runtime/globals.hpp"
  37 #include "runtime/objectMonitor.hpp"
  38 #include "runtime/stubRoutines.hpp"
  39 #include "utilities/checkedCast.hpp"
  40 #include "utilities/globalDefinitions.hpp"
  41 #include "utilities/powerOfTwo.hpp"
  42 #include "utilities/sizes.hpp"
  43 
  44 #ifdef PRODUCT
  45 #define BLOCK_COMMENT(str) /* nothing */
  46 #define STOP(error) stop(error)
  47 #else
  48 #define BLOCK_COMMENT(str) block_comment(str)
  49 #define STOP(error) block_comment(error); stop(error)
  50 #endif
  51 
  52 // C2 compiled method's prolog code.
  53 void C2_MacroAssembler::verified_entry(int framesize, int stack_bang_size, bool fp_mode_24b, bool is_stub) {
  54 
  55   // WARNING: Initial instruction MUST be 5 bytes or longer so that
  56   // NativeJump::patch_verified_entry will be able to patch out the entry
  57   // code safely. The push to verify stack depth is ok at 5 bytes,
  58   // the frame allocation can be either 3 or 6 bytes. So if we don't do
  59   // a stack bang, we must use the 6-byte frame allocation even if
  60   // we have no frame. :-(
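        // (Illustrative note, not part of the original source: the patch that
        //  NativeJump::patch_verified_entry writes is a 5-byte x86 jump,
        //  0xE9 <rel32>, so a shorter first instruction could be torn, i.e.
        //  partially overwritten, while another thread is executing it.)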
  61   assert(stack_bang_size >= framesize || stack_bang_size <= 0, "stack bang size incorrect");
  62 
  63   assert((framesize & (StackAlignmentInBytes-1)) == 0, "frame size not aligned");
  64   // Remove word for return addr
  65   framesize -= wordSize;
  66   stack_bang_size -= wordSize;
  67 
  68   // Calls to C2R adapters often do not accept exceptional returns.
  69   // We require their callers to bang for them. But be careful, because
  70   // some VM calls (such as call site linkage) can use several kilobytes of
  71   // stack; the stack safety zone should account for that.
  72   // See bugs 4446381, 4468289, 4497237.
  73   if (stack_bang_size > 0) {

  86     // Create frame
  87     if (framesize) {
  88       subptr(rsp, framesize);
  89     }
  90   } else {
  91     // Create frame (force generation of a 4-byte immediate value)
  92     subptr_imm32(rsp, framesize);
  93 
  94     // Save RBP register now.
  95     framesize -= wordSize;
  96     movptr(Address(rsp, framesize), rbp);
  97     // Save caller's stack pointer into RBP if the frame pointer is preserved.
  98     if (PreserveFramePointer) {
  99       movptr(rbp, rsp);
 100       if (framesize > 0) {
 101         addptr(rbp, framesize);
 102       }
 103     }
 104   }
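        // (Illustrative sketch of the resulting layout, higher addresses
        //  first; not part of the original source:
        //    [ return address ]  <- caller rsp at the call
        //    [ saved rbp      ]  <- rbp when PreserveFramePointer is set
        //    [ frame body     ]
        //    [                ]  <- rsp after the prologue)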
 105 
 106   if (VerifyStackAtCalls) { // Majik cookie to verify stack depth
 107     framesize -= wordSize;
 108     movptr(Address(rsp, framesize), (int32_t)0xbadb100d);
 109   }
 110 
 111 #ifndef _LP64
 112   // If method sets FPU control word do it now
 113   if (fp_mode_24b) {
 114     fldcw(ExternalAddress(StubRoutines::x86::addr_fpu_cntrl_wrd_24()));
 115   }
 116   if (UseSSE >= 2 && VerifyFPU) {
 117     verify_FPU(0, "FPU stack must be clean on entry");
 118   }
 119 #endif
 120 
 121 #ifdef ASSERT
 122   if (VerifyStackAtCalls) {
 123     Label L;
 124     push(rax);
 125     mov(rax, rsp);
 126     andptr(rax, StackAlignmentInBytes-1);
 127     cmpptr(rax, StackAlignmentInBytes-wordSize);
 128     pop(rax);
 129     jcc(Assembler::equal, L);
 130     STOP("Stack is not properly aligned!");
 131     bind(L);
 132   }
 133 #endif
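        // (Arithmetic of the check above, added for illustration: on 64-bit
        //  it requires rsp & 15 == 16 - 8 == 8 once the frame is in place.
        //  Since the prologue dropped rsp by the 16-byte-aligned frame size
        //  minus the return-address word, this is equivalent to rsp having
        //  been 16-byte aligned at the verified entry point.)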
 134 
 135   if (!is_stub) {
 136     BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
 137 #ifdef _LP64
 138     if (BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) {
 139       // We put the non-hot code of the nmethod entry barrier out-of-line in a stub.
 140       Label dummy_slow_path;
 141       Label dummy_continuation;
 142       Label* slow_path = &dummy_slow_path;
 143       Label* continuation = &dummy_continuation;
 144       if (!Compile::current()->output()->in_scratch_emit_size()) {
 145         // Use the real labels from the actual stub when not merely emitting code to measure its size
 146         C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub();
 147         Compile::current()->output()->add_stub(stub);
 148         slow_path = &stub->entry();
 149         continuation = &stub->continuation();
 150       }
 151       bs->nmethod_entry_barrier(this, slow_path, continuation);
 152     }
 153 #else
 154     // Don't bother with out-of-line nmethod entry barrier stub for x86_32.
 155     bs->nmethod_entry_barrier(this, nullptr /* slow_path */, nullptr /* continuation */);
 156 #endif
 157   }
 158 }
 159 
 160 inline Assembler::AvxVectorLen C2_MacroAssembler::vector_length_encoding(int vlen_in_bytes) {
 161   switch (vlen_in_bytes) {
 162     case  4: // fall-through
 163     case  8: // fall-through
 164     case 16: return Assembler::AVX_128bit;
 165     case 32: return Assembler::AVX_256bit;
 166     case 64: return Assembler::AVX_512bit;
 167 
 168     default: {
 169       ShouldNotReachHere();
 170       return Assembler::AVX_NoVec;
 171     }
 172   }
 173 }
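// Illustrative restatement of the mapping above (a sketch, not part of the
// original source); 4- and 8-byte vectors share the 128-bit encoding because
// an XMM operation simply ignores the unused upper lanes:
//
//   static Assembler::AvxVectorLen encode(int bytes) {
//     return bytes <= 16 ? Assembler::AVX_128bit
//          : bytes == 32 ? Assembler::AVX_256bit
//          :               Assembler::AVX_512bit;   // bytes == 64
//   }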
 174 
 175 // fast_lock and fast_unlock used by C2
 176 
 177 // Because the transitions from emitted code to the runtime

 275 
 276   Label IsInflated, DONE_LABEL, NO_COUNT, COUNT;
 277 
 278   if (DiagnoseSyncOnValueBasedClasses != 0) {
 279     load_klass(tmpReg, objReg, scrReg);
 280     testb(Address(tmpReg, Klass::misc_flags_offset()), KlassFlags::_misc_is_value_based_class);
 281     jcc(Assembler::notZero, DONE_LABEL);
 282   }
 283 
 284   movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes()));          // [FETCH]
 285   testptr(tmpReg, markWord::monitor_value); // inflated vs stack-locked|neutral
 286   jcc(Assembler::notZero, IsInflated);
 287 
 288   if (LockingMode == LM_MONITOR) {
 289     // Clear ZF so that we take the slow path at the DONE label. objReg is known to be non-zero.
 290     testptr(objReg, objReg);
 291   } else {
 292     assert(LockingMode == LM_LEGACY, "must be");
 293     // Attempt stack-locking ...
 294     orptr (tmpReg, markWord::unlocked_value);
 295     movptr(Address(boxReg, 0), tmpReg);          // Anticipate successful CAS
 296     lock();
 297     cmpxchgptr(boxReg, Address(objReg, oopDesc::mark_offset_in_bytes()));      // Updates tmpReg
 298     jcc(Assembler::equal, COUNT);           // Success
 299 
 300     // Recursive locking.
 301     // The object is stack-locked: markword contains stack pointer to BasicLock.
 302     // Locked by current thread if difference with current SP is less than one page.
 303     subptr(tmpReg, rsp);
 304     // Next instruction sets ZFlag == 1 (Success) if the difference is less than one page.
 305     andptr(tmpReg, (int32_t) (NOT_LP64(0xFFFFF003) LP64_ONLY(7 - (int)os::vm_page_size())) );
 306     movptr(Address(boxReg, 0), tmpReg);
 307   }
 308   jmp(DONE_LABEL);
 309 
 310   bind(IsInflated);
 311   // The object is inflated. tmpReg holds the ObjectMonitor* tagged with markWord::monitor_value.
 312 
 313 #ifndef _LP64
 314   // The object is inflated.

src/hotspot/cpu/x86/c2_MacroAssembler_x86.cpp (new version)

  33 #include "opto/output.hpp"
  34 #include "opto/opcodes.hpp"
  35 #include "opto/subnode.hpp"
  36 #include "runtime/globals.hpp"
  37 #include "runtime/objectMonitor.hpp"
  38 #include "runtime/stubRoutines.hpp"
  39 #include "utilities/checkedCast.hpp"
  40 #include "utilities/globalDefinitions.hpp"
  41 #include "utilities/powerOfTwo.hpp"
  42 #include "utilities/sizes.hpp"
  43 
  44 #ifdef PRODUCT
  45 #define BLOCK_COMMENT(str) /* nothing */
  46 #define STOP(error) stop(error)
  47 #else
  48 #define BLOCK_COMMENT(str) block_comment(str)
  49 #define STOP(error) block_comment(error); stop(error)
  50 #endif
  51 
  52 // C2 compiled method's prolog code.
  53 void C2_MacroAssembler::verified_entry(Compile* C, int sp_inc) {
  54   if (C->clinit_barrier_on_entry()) {
  55     assert(VM_Version::supports_fast_class_init_checks(), "sanity");
  56     assert(!C->method()->holder()->is_not_initialized(), "initialization should have been started");
  57 
  58     Label L_skip_barrier;
  59     Register klass = rscratch1;
  60 
  61     mov_metadata(klass, C->method()->holder()->constant_encoding());
  62     clinit_barrier(klass, r15_thread, &L_skip_barrier /*L_fast_path*/);
  63 
  64     jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub())); // slow path
  65 
  66     bind(L_skip_barrier);
  67   }
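        // (Sketch of what clinit_barrier checks, added for illustration; the
        //  real code is in MacroAssembler: take the fast path if the holder
        //  is fully initialized, or if it is being initialized by the current
        //  thread; otherwise jump to the wrong-method stub so the runtime can
        //  wait for <clinit> to finish before re-dispatching.)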
  68 
  69   int framesize = C->output()->frame_size_in_bytes();
  70   int bangsize = C->output()->bang_size_in_bytes();
  71   bool fp_mode_24b = false;
  72   int stack_bang_size = C->output()->need_stack_bang(bangsize) ? bangsize : 0;
  73 
  74   // WARNING: Initial instruction MUST be 5 bytes or longer so that
  75   // NativeJump::patch_verified_entry will be able to patch out the entry
  76   // code safely. The push to verify stack depth is ok at 5 bytes,
  77   // the frame allocation can be either 3 or 6 bytes. So if we don't do
  78   // a stack bang, we must use the 6-byte frame allocation even if
  79   // we have no frame. :-(
  80   assert(stack_bang_size >= framesize || stack_bang_size <= 0, "stack bang size incorrect");
  81 
  82   assert((framesize & (StackAlignmentInBytes-1)) == 0, "frame size not aligned");
  83   // Remove word for return addr
  84   framesize -= wordSize;
  85   stack_bang_size -= wordSize;
  86 
  87   // Calls to C2R adapters often do not accept exceptional returns.
  88   // We require their callers to bang for them. But be careful, because
  89   // some VM calls (such as call site linkage) can use several kilobytes of
  90   // stack; the stack safety zone should account for that.
  91   // See bugs 4446381, 4468289, 4497237.
  92   if (stack_bang_size > 0) {

 105     // Create frame
 106     if (framesize) {
 107       subptr(rsp, framesize);
 108     }
 109   } else {
 110     // Create frame (force generation of a 4-byte immediate value)
 111     subptr_imm32(rsp, framesize);
 112 
 113     // Save RBP register now.
 114     framesize -= wordSize;
 115     movptr(Address(rsp, framesize), rbp);
 116     // Save caller's stack pointer into RBP if the frame pointer is preserved.
 117     if (PreserveFramePointer) {
 118       movptr(rbp, rsp);
 119       if (framesize > 0) {
 120         addptr(rbp, framesize);
 121       }
 122     }
 123   }
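        // (The bang code itself is elided in this view. As a general
        //  illustration, not the elided code: a stack bang touches each page
        //  the new frame will span, below the current rsp, so that a stack
        //  overflow faults at this well-defined point in the prologue rather
        //  than at an arbitrary store inside the method body.)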
 124 
 125   if (C->needs_stack_repair()) {
 126     // Save stack increment just below the saved rbp (also account for fixed framesize and rbp)
 127     assert((sp_inc & (StackAlignmentInBytes-1)) == 0, "stack increment not aligned");
 128     movptr(Address(rsp, framesize - wordSize), sp_inc + framesize + wordSize);
 129   }
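        // (Worked example with made-up numbers, for illustration: if
        //  framesize is 96 at this point and sp_inc is 32, the stored value
        //  is 32 + 96 + 8 == 136, presumably the total the runtime must pop,
        //  i.e. the scalarized-argument extension plus the fixed frame plus
        //  the saved-rbp slot, when it repairs the stack.)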
 130 
 131   if (VerifyStackAtCalls) { // Majik cookie to verify stack depth
 132     framesize -= wordSize;
 133     movptr(Address(rsp, framesize), (int32_t)0xbadb100d);
 134   }
 135 
 136 #ifndef _LP64
 137   // If method sets FPU control word do it now
 138   if (fp_mode_24b) {
 139     fldcw(ExternalAddress(StubRoutines::x86::addr_fpu_cntrl_wrd_24()));
 140   }
 141   if (UseSSE >= 2 && VerifyFPU) {
 142     verify_FPU(0, "FPU stack must be clean on entry");
 143   }
 144 #endif
 145 
 146 #ifdef ASSERT
 147   if (VerifyStackAtCalls) {
 148     Label L;
 149     push(rax);
 150     mov(rax, rsp);
 151     andptr(rax, StackAlignmentInBytes-1);
 152     cmpptr(rax, StackAlignmentInBytes-wordSize);
 153     pop(rax);
 154     jcc(Assembler::equal, L);
 155     STOP("Stack is not properly aligned!");
 156     bind(L);
 157   }
 158 #endif
 159 }
 160 
 161 void C2_MacroAssembler::entry_barrier() {
 162   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
 163 #ifdef _LP64
 164   if (BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) {
 165     // We put the non-hot code of the nmethod entry barrier out-of-line in a stub.
 166     Label dummy_slow_path;
 167     Label dummy_continuation;
 168     Label* slow_path = &dummy_slow_path;
 169     Label* continuation = &dummy_continuation;
 170     if (!Compile::current()->output()->in_scratch_emit_size()) {
 171       // Use the real labels from the actual stub when not merely emitting code to measure its size
 172       C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub();
 173       Compile::current()->output()->add_stub(stub);
 174       slow_path = &stub->entry();
 175       continuation = &stub->continuation();
 176     }
 177     bs->nmethod_entry_barrier(this, slow_path, continuation);
 178   }
 179 #else
 180   // Don't bother with out-of-line nmethod entry barrier stub for x86_32.
 181   bs->nmethod_entry_barrier(this, nullptr /* slow_path */, nullptr /* continuation */);
 182 #endif
 183 }
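// Shape of the fast path emitted by nmethod_entry_barrier on x86-64 (an
// illustration, not the exact instruction sequence):
//
//   cmp    <nmethod guard value>, <thread's disarmed value>
//   jne    slow_path         ; out-of-line C2EntryBarrierStub, calls runtime
// continuation:              ; stub jumps back here
//
// Keeping the cold path in a stub keeps the hot prologue short; during
// scratch emission (size measurement only) the dummy labels stand in for the
// stub's real ones.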
 184 
 185 inline Assembler::AvxVectorLen C2_MacroAssembler::vector_length_encoding(int vlen_in_bytes) {
 186   switch (vlen_in_bytes) {
 187     case  4: // fall-through
 188     case  8: // fall-through
 189     case 16: return Assembler::AVX_128bit;
 190     case 32: return Assembler::AVX_256bit;
 191     case 64: return Assembler::AVX_512bit;
 192 
 193     default: {
 194       ShouldNotReachHere();
 195       return Assembler::AVX_NoVec;
 196     }
 197   }
 198 }
 199 
 200 // fast_lock and fast_unlock used by C2
 201 
 202 // Because the transitions from emitted code to the runtime

 300 
 301   Label IsInflated, DONE_LABEL, NO_COUNT, COUNT;
 302 
 303   if (DiagnoseSyncOnValueBasedClasses != 0) {
 304     load_klass(tmpReg, objReg, scrReg);
 305     testb(Address(tmpReg, Klass::misc_flags_offset()), KlassFlags::_misc_is_value_based_class);
 306     jcc(Assembler::notZero, DONE_LABEL);
 307   }
 308 
 309   movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes()));          // [FETCH]
 310   testptr(tmpReg, markWord::monitor_value); // inflated vs stack-locked|neutral
 311   jcc(Assembler::notZero, IsInflated);
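        // (Tag reference, added for illustration; values from markWord.hpp:
        //  low two bits 01 = unlocked/neutral, 00 = stack-locked,
        //  10 = inflated monitor. monitor_value is 2, so the test above
        //  checks bit 1.)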
 312 
 313   if (LockingMode == LM_MONITOR) {
 314     // Clear ZF so that we take the slow path at the DONE label. objReg is known to be non-zero.
 315     testptr(objReg, objReg);
 316   } else {
 317     assert(LockingMode == LM_LEGACY, "must be");
 318     // Attempt stack-locking ...
 319     orptr (tmpReg, markWord::unlocked_value);
 320     if (EnableValhalla) {
 321       // Mask inline_type bit such that we go to the slow path if object is an inline type
 322       andptr(tmpReg, ~((int) markWord::inline_type_bit_in_place));
 323     }
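        // (Why this forces the slow path, for illustration: an inline type's
        //  mark word keeps its inline_type bit set, so the CAS below fails
        //  the compare, and the low bits of the fetched mark then keep the
        //  recursive-lock test below from ever seeing zero, so control falls
        //  to DONE_LABEL with ZF clear, i.e. the slow path.)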
 324     movptr(Address(boxReg, 0), tmpReg);          // Anticipate successful CAS
 325     lock();
 326     cmpxchgptr(boxReg, Address(objReg, oopDesc::mark_offset_in_bytes()));      // Updates tmpReg
 327     jcc(Assembler::equal, COUNT);           // Success
 328 
 329     // Recursive locking.
 330     // The object is stack-locked: markword contains stack pointer to BasicLock.
 331     // Locked by current thread if difference with current SP is less than one page.
 332     subptr(tmpReg, rsp);
 333     // Next instruction sets ZFlag == 1 (Success) if the difference is less than one page.
 334     andptr(tmpReg, (int32_t) (NOT_LP64(0xFFFFF003) LP64_ONLY(7 - (int)os::vm_page_size())) );
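        // (Worked example of the 64-bit mask, for illustration: with a 4K
        //  page, 7 - 4096 == 0xFFFFF007 == ~(4096 - 8), which keeps bits 0-2
        //  plus every bit from the page bit up. ZF is therefore set exactly
        //  when mark - rsp is 8-byte aligned and smaller than one page, and
        //  on that success path the masked value 0 is stored below as the
        //  recursion marker in the BasicLock.)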
 335     movptr(Address(boxReg, 0), tmpReg);
 336   }
 337   jmp(DONE_LABEL);
 338 
 339   bind(IsInflated);
 340   // The object is inflated. tmpReg holds the ObjectMonitor* tagged with markWord::monitor_value.
 341 
 342 #ifndef _LP64
 343   // The object is inflated.