< prev index next >

src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp

Print this page

 49 
 50   address base = __ start_a_stub(to_interp_stub_size());
 51   int offset = __ offset();
 52   if (base == nullptr) {
 53     return nullptr;  // CodeBuffer::expand failed
 54   }
 55   // static stub relocation stores the instruction address of the call
 56   __ relocate(static_stub_Relocation::spec(mark));
 57 
 58   {
 59     __ emit_static_call_stub();
 60   }
 61 
 62   assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
 63   __ end_a_stub();
 64   return base;
 65 }
 66 #undef __
 67 
 68 int CompiledDirectCall::to_interp_stub_size() {
 69   return MacroAssembler::static_call_stub_size();
 70 }
 71 
 72 int CompiledDirectCall::to_trampoline_stub_size() {
 73   // Somewhat pessimistically, we count 3 instructions here (although
 74   // there are only two) because we sometimes emit an alignment nop.
 75   // Trampoline stubs are always word aligned.
 76   return MacroAssembler::max_trampoline_stub_size();
 77 }
 78 
 79 // Relocation entries for call stub, compiled java to interpreter.
 80 int CompiledDirectCall::reloc_to_interp_stub() {
 81   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
 82 }
 83 
// Patch this direct call site so it dispatches to the interpreter:
// point the static stub's method holder at 'callee', retarget the stub's
// jump to 'entry', then atomically redirect the call to the stub.
 84 void CompiledDirectCall::set_to_interpreted(const methodHandle& callee, address entry) {
 85   address stub = find_stub();
 86   guarantee(stub != nullptr, "stub not found");
 87 
 88   // Creation also verifies the object.
 89   NativeMovConstReg* method_holder
 90     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
 91 







// The stub's terminal jump is a far (general) jump when code-stub branches
// can be out of short-branch range; pick the matching accessor so the
// MT-safety check inspects the right instruction form.
 92 #ifdef ASSERT
 93   NativeJump* jump = MacroAssembler::codestub_branch_needs_far_jump()
 94                          ? nativeGeneralJump_at(method_holder->next_instruction_address())
 95                          : nativeJump_at(method_holder->next_instruction_address())ยบ;
 96   verify_mt_safe(callee, entry, method_holder, jump);
 97 #endif
 98 
 99   // Update stub.
100   method_holder->set_data((intptr_t)callee());
101   MacroAssembler::pd_patch_instruction(method_holder->next_instruction_address(), entry);
// Flush the patched stub from the instruction cache before any thread can
// be routed through it.
102   ICache::invalidate_range(stub, to_interp_stub_size());
// Only now make the call site itself point at the (fully patched) stub.
103   // Update jump to call.
104   set_destination_mt_safe(stub);
105 }
106 
// Reset a static call stub to its "clean" state: zero the method holder and
// point the stub's jump at the sentinel (address)-1.
107 void CompiledDirectCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
108   // Reset stub.
109   address stub = static_stub->addr();
110   assert(stub != nullptr, "stub not found");
111   assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
112   // Creation also verifies the object.
113   NativeMovConstReg* method_holder
114     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
115   method_holder->set_data(0);
// NOTE(review): set_to_interpreted distinguishes short vs. far (general)
// jumps, but cleaning always goes through nativeJump_at — presumably
// set_jump_destination copes with both encodings; confirm.
116   NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
117   jump->set_jump_destination((address)-1);
118 }
119 
120 //-----------------------------------------------------------------------------
121 // Non-product mode code

 49 
 50   address base = __ start_a_stub(to_interp_stub_size());
 51   int offset = __ offset();
 52   if (base == nullptr) {
 53     return nullptr;  // CodeBuffer::expand failed
 54   }
 55   // static stub relocation stores the instruction address of the call
 56   __ relocate(static_stub_Relocation::spec(mark));
 57 
 58   {
 59     __ emit_static_call_stub();
 60   }
 61 
 62   assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
 63   __ end_a_stub();
 64   return base;
 65 }
 66 #undef __
 67 
 68 int CompiledDirectCall::to_interp_stub_size() {
 69   return MacroAssembler::max_static_call_stub_size();
 70 }
 71 
 72 int CompiledDirectCall::to_trampoline_stub_size() {
 73   // Somewhat pessimistically, we count 3 instructions here (although
 74   // there are only two) because we sometimes emit an alignment nop.
 75   // Trampoline stubs are always word aligned.
 76   return MacroAssembler::max_trampoline_stub_size();
 77 }
 78 
 79 // Relocation entries for call stub, compiled java to interpreter.
 80 int CompiledDirectCall::reloc_to_interp_stub() {
 81   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
 82 }
 83 
// Patch this direct call site so it dispatches to the interpreter:
// point the static stub's method holder at 'callee', retarget the stub's
// jump to 'entry', then atomically redirect the call to the stub.
 84 void CompiledDirectCall::set_to_interpreted(const methodHandle& callee, address entry) {
 85   address stub = find_stub();
 86   guarantee(stub != nullptr, "stub not found");
 87 
 88   // Creation also verifies the object.
 89   NativeMovConstReg* method_holder
 90     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
 91 
 92   // In AOT "production" run we have mixture of AOTed and normal JITed code.
 93   // Static call stub in AOTed nmethod always has far jump.
 94   // Normal JITed nmethod may have short or far jump depending on distance.
 95   // Determine actual jump instruction we have in code.
 96   address next_instr = method_holder->next_instruction_address();
// NOTE(review): in this view is_general_jump is consumed only under ASSERT;
// in product builds it may be flagged as unused — verify against the full file.
 97   bool is_general_jump = nativeInstruction_at(next_instr)->is_general_jump();
 98 
 99 #ifdef ASSERT
100   NativeJump* jump = is_general_jump ? nativeGeneralJump_at(next_instr) : nativeJump_at(next_instr);


101   verify_mt_safe(callee, entry, method_holder, jump);
102 #endif
103 
104   // Update stub.
105   method_holder->set_data((intptr_t)callee());
106   MacroAssembler::pd_patch_instruction(next_instr, entry);
// Flush the patched stub from the instruction cache before any thread can
// be routed through it.
107   ICache::invalidate_range(stub, to_interp_stub_size());
// Only now make the call site itself point at the (fully patched) stub.
108   // Update jump to call.
109   set_destination_mt_safe(stub);
110 }
111 
// Reset a static call stub to its "clean" state: zero the method holder and
// point the stub's jump at the sentinel (address)-1.
112 void CompiledDirectCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
113   // Reset stub.
114   address stub = static_stub->addr();
115   assert(stub != nullptr, "stub not found");
116   assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
117   // Creation also verifies the object.
118   NativeMovConstReg* method_holder
119     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
120   method_holder->set_data(0);
// NOTE(review): set_to_interpreted now discriminates short vs. far (general)
// jumps (AOT stubs always use a far jump), but cleaning still goes through
// nativeJump_at unconditionally — presumably set_jump_destination handles
// both encodings; confirm for general-jump stubs.
121   NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
122   jump->set_jump_destination((address)-1);
123 }
124 
125 //-----------------------------------------------------------------------------
126 // Non-product mode code
< prev index next >