< prev index next >

src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp

Print this page




  44   if (mark == NULL) {
  45     mark = cbuf.insts_mark();  // Get mark within main instrs section.
  46   }
  47 
  48   // Note that the code buffer's insts_mark is always relative to insts.
  49   // That's why we must use the macroassembler to generate a stub.
  50   MacroAssembler _masm(&cbuf);
  51 
  52   address base = __ start_a_stub(to_interp_stub_size());
  53   int offset = __ offset();
  54   if (base == NULL) {
  55     return NULL;  // CodeBuffer::expand failed
  56   }
  57   // static stub relocation stores the instruction address of the call
  58   __ relocate(static_stub_Relocation::spec(mark));
  59 
  60 #if INCLUDE_AOT
  61   // Don't create a Metadata reloc if we're generating immutable PIC.
  62   if (cbuf.immutable_PIC()) {
  63     __ movptr(rmethod, 0);
  64     __ movptr(rscratch1, 0);
  65     __ br(rscratch1);
  66 
  67   } else
  68 #endif
  69   {
  70     __ emit_static_call_stub();
  71   }





  72 
  73   assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  74   __ end_a_stub();
  75   return base;
  76 }
  77 #undef __
  78 
  79 int CompiledStaticCall::to_interp_stub_size() {
       // Byte size reserved for the compiled-to-interpreter call stub.
       // The comment below lists the 8 instructions the stub may contain;
       // the emitter asserts the generated stub fits within this bound.
  80   // isb; movk; movz; movz; movk; movz; movz; br
  81   return 8 * NativeInstruction::instruction_size;
  82 }
  83 
  84 int CompiledStaticCall::to_trampoline_stub_size() {
       // Byte size reserved for a trampoline stub: up to 3 instruction slots
       // plus one word (presumably the 64-bit branch target the trampoline
       // loads — TODO confirm against the trampoline stub emitter).
  85   // Somewhat pessimistically, we count 3 instructions here (although
  86   // there are only two) because we sometimes emit an alignment nop.
  87   // Trampoline stubs are always word aligned.
  88   return 3 * NativeInstruction::instruction_size + wordSize;
  89 }
  90 
  91 // Relocation entries for call stub, compiled java to interpreter.
  92 int CompiledStaticCall::reloc_to_interp_stub() {
       // Upper bound on the number of relocation records the stub needs;
       // the breakdown is given in the trailing comment below.
  93   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
  94 }
  95 
  96 #if INCLUDE_AOT
  97 #define __ _masm.
  98 void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  99   if (!UseAOT) {
 100     return;
 101   }


 143   if (UseAOT) {
 144     return 5 * 4;  // movz; movk; movk; movk; br
 145   } else {
 146     return 0;
 147   }
 148 }
 149 #endif // INCLUDE_AOT
 150 
     // Point this compiled static call at the interpreter: fill in the stub's
     // Method* load and its jump to 'entry', then patch the call site to
     // target the stub.
 151 void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
 152   address stub = find_stub(false /* is_aot */);
 153   guarantee(stub != NULL, "stub not found");
 154 
 155   if (TraceICs) {
 156     ResourceMark rm;
 157     tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
 158                   p2i(instruction_address()),
 159                   callee->name_and_sig_as_C_string());
 160   }
 161 
 162   // Creation also verifies the object.
 163   NativeMovConstReg* method_holder
 164     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
       // Debug-only race check: the stub must either be clean (data == 0) or
       // already hold exactly the values we are about to write.
 165 #ifndef PRODUCT
 166   NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());
 167 
 168   // read the value once
 169   volatile intptr_t data = method_holder->data();
 170   assert(data == 0 || data == (intptr_t)callee(),
 171          "a) MT-unsafe modification of inline cache");
 172   assert(data == 0 || jump->jump_destination() == entry,
 173          "b) MT-unsafe modification of inline cache");
 174 #endif
 175   // Update stub.
 176   method_holder->set_data((intptr_t)callee());
 177   NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
       // Flush the patched stub range from the instruction cache before any
       // thread can branch into it.
 178   ICache::invalidate_range(stub, to_interp_stub_size());
 179   // Update jump to call.
       // NOTE(review): done last — presumably so the call never targets a
       // half-written stub; confirm the required ordering/membarriers.
 180   set_destination_mt_safe(stub);
 181 }
 182 
     // Return the stub to its clean state by zeroing the Method* that the
     // stub's mov-constant loads. Caller must hold the CompiledICLocker
     // (asserted below).
 183 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
 184   // Reset stub.
 185   address stub = static_stub->addr();
 186   assert(stub != NULL, "stub not found");
 187   assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
 188   // Creation also verifies the object.
 189   NativeMovConstReg* method_holder
 190     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
 191   method_holder->set_data(0);
 192 }
 193 
 194 //-----------------------------------------------------------------------------
 195 // Non-product mode code
 196 #ifndef PRODUCT
 197 
     // Debug-only consistency check of the call site and its to-interpreter
     // stub (compiled under #ifndef PRODUCT only).
 198 void CompiledDirectStaticCall::verify() {
 199   // Verify call.
 200   _call->verify();
 201   _call->verify_alignment();
 202 
 203   // Verify stub.
 204   address stub = find_stub(false /* is_aot */);
 205   assert(stub != NULL, "no stub found for static call");
 206   // Creation also verifies the object.
 207   NativeMovConstReg* method_holder
 208     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
       // nativeJump_at performs its own verification on construction;
       // 'jump' is intentionally unused beyond that.
 209   NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
 210 
 211   // Verify state.
 212   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
 213 }
 214 
 215 #endif // !PRODUCT


  44   if (mark == NULL) {
  45     mark = cbuf.insts_mark();  // Get mark within main instrs section.
  46   }
  47 
  48   // Note that the code buffer's insts_mark is always relative to insts.
  49   // That's why we must use the macroassembler to generate a stub.
  50   MacroAssembler _masm(&cbuf);
  51 
  52   address base = __ start_a_stub(to_interp_stub_size());
  53   int offset = __ offset();
  54   if (base == NULL) {
  55     return NULL;  // CodeBuffer::expand failed
  56   }
  57   // static stub relocation stores the instruction address of the call
  58   __ relocate(static_stub_Relocation::spec(mark));
  59 
  60 #if INCLUDE_AOT
  61   // Don't create a Metadata reloc if we're generating immutable PIC.
  62   if (cbuf.immutable_PIC()) {
  63     __ movptr(rmethod, 0);
  64   } else {
  65     __ mov_metadata(rmethod, (Metadata*)NULL);





  66   }
  67 #else
  68   __ mov_metadata(rmethod, (Metadata*)NULL);
  69 #endif
  70   __ movptr(rscratch1, 0);
  71   __ br(rscratch1);
  72 
  73   assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  74   __ end_a_stub();
  75   return base;
  76 }
  77 #undef __
  78 
  79 int CompiledStaticCall::to_interp_stub_size() {
       // Byte size reserved for the compiled-to-interpreter call stub.
       // NOTE(review): presumably movptr(rmethod) + movptr(rscratch1) + br
       // (3 + 3 + 1 instructions) as emitted by emit_to_interp_stub — the
       // emitter's assert checks the fit, but confirm this count if the
       // stub layout changes.
  80   return 7 * NativeInstruction::instruction_size;

  81 }
  82 
  83 int CompiledStaticCall::to_trampoline_stub_size() {
       // Byte size reserved for a trampoline stub: up to 3 instruction slots
       // plus one word (presumably the 64-bit branch target the trampoline
       // loads — TODO confirm against the trampoline stub emitter).
  84   // Somewhat pessimistically, we count 3 instructions here (although
  85   // there are only two) because we sometimes emit an alignment nop.
  86   // Trampoline stubs are always word aligned.
  87   return 3 * NativeInstruction::instruction_size + wordSize;
  88 }
  89 
  90 // Relocation entries for call stub, compiled java to interpreter.
  91 int CompiledStaticCall::reloc_to_interp_stub() {
       // Upper bound on the number of relocation records the stub needs;
       // the breakdown is given in the trailing comment below.
  92   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
  93 }
  94 
  95 #if INCLUDE_AOT
  96 #define __ _masm.
  97 void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  98   if (!UseAOT) {
  99     return;
 100   }


 142   if (UseAOT) {
 143     return 5 * 4;  // movz; movk; movk; movk; br
 144   } else {
 145     return 0;
 146   }
 147 }
 148 #endif // INCLUDE_AOT
 149 
     // Point this compiled static call at the interpreter: fill in the stub's
     // Method* load and its jump to 'entry', then patch the call site to
     // target the stub. Here the mov-constant is the first instruction of
     // the stub (nativeMovConstReg_at(stub) with no leading offset).
 150 void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
 151   address stub = find_stub(false /* is_aot */);
 152   guarantee(stub != NULL, "stub not found");
 153 
 154   if (TraceICs) {
 155     ResourceMark rm;
 156     tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
 157                   p2i(instruction_address()),
 158                   callee->name_and_sig_as_C_string());
 159   }
 160 
 161   // Creation also verifies the object.
 162   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);

       // Debug-only race check: the stub must either be clean (data == 0) or
       // already hold exactly the values we are about to write.
 163 #ifndef PRODUCT
 164   NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());
 165 
 166   // read the value once
 167   volatile intptr_t data = method_holder->data();
 168   assert(data == 0 || data == (intptr_t)callee(),
 169          "a) MT-unsafe modification of inline cache");
 170   assert(data == 0 || jump->jump_destination() == entry,
 171          "b) MT-unsafe modification of inline cache");
 172 #endif
 173   // Update stub.
 174   method_holder->set_data((intptr_t)callee());
 175   NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
       // Flush the patched stub range from the instruction cache before any
       // thread can branch into it.
 176   ICache::invalidate_range(stub, to_interp_stub_size());
 177   // Update jump to call.
       // NOTE(review): done last — presumably so the call never targets a
       // half-written stub; confirm the required ordering/membarriers.
 178   set_destination_mt_safe(stub);
 179 }
 180 
     // Return the stub to its clean state by zeroing the Method* that the
     // stub's mov-constant loads. Caller must hold the CompiledICLocker
     // (asserted below).
 181 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
 182   // Reset stub.
 183   address stub = static_stub->addr();
 184   assert(stub != NULL, "stub not found");
 185   assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
 186   // Creation also verifies the object.
 187   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);

 188   method_holder->set_data(0);
 189 }
 190 
 191 //-----------------------------------------------------------------------------
 192 // Non-product mode code
 193 #ifndef PRODUCT
 194 
     // Debug-only consistency check of the call site and its to-interpreter
     // stub (compiled under #ifndef PRODUCT only).
 195 void CompiledDirectStaticCall::verify() {
 196   // Verify call.
 197   _call->verify();
 198   _call->verify_alignment();
 199 
 200   // Verify stub.
 201   address stub = find_stub(false /* is_aot */);
 202   assert(stub != NULL, "no stub found for static call");
 203   // Creation also verifies the object.
 204   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
       // nativeJump_at performs its own verification on construction;
       // 'jump' is intentionally unused beyond that.
 205   NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

 206 
 207   // Verify state.
 208   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
 209 }
 210 
 211 #endif // !PRODUCT
< prev index next >