
src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp

*** 59,86 ****
  #if INCLUDE_AOT
    // Don't create a Metadata reloc if we're generating immutable PIC.
    if (cbuf.immutable_PIC()) {
      __ movptr(rmethod, 0);
!     __ movptr(rscratch1, 0);
!     __ br(rscratch1);
!
!   } else
! #endif
!   {
!     __ emit_static_call_stub();
    }

    assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
    __ end_a_stub();
    return base;
  }
  #undef __

  int CompiledStaticCall::to_interp_stub_size() {
!   // isb; movk; movz; movz; movk; movz; movz; br
!   return 8 * NativeInstruction::instruction_size;
  }

  int CompiledStaticCall::to_trampoline_stub_size() {
    // Somewhat pessimistically, we count 3 instructions here (although
    // there are only two) because we sometimes emit an alignment nop.
--- 59,85 ----
  #if INCLUDE_AOT
    // Don't create a Metadata reloc if we're generating immutable PIC.
    if (cbuf.immutable_PIC()) {
      __ movptr(rmethod, 0);
!   } else {
!     __ mov_metadata(rmethod, (Metadata*)NULL);
    }
+ #else
+   __ mov_metadata(rmethod, (Metadata*)NULL);
+ #endif
+   __ movptr(rscratch1, 0);
+   __ br(rscratch1);

    assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
    __ end_a_stub();
    return base;
  }
  #undef __

  int CompiledStaticCall::to_interp_stub_size() {
!   return 7 * NativeInstruction::instruction_size;
  }

  int CompiledStaticCall::to_trampoline_stub_size() {
    // Somewhat pessimistically, we count 3 instructions here (although
    // there are only two) because we sometimes emit an alignment nop.
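
The hunk above replaces the emit_static_call_stub() path with direct emission of the mov_metadata / movptr / br sequence, so the stub no longer begins with an isb and shrinks from eight to seven instructions (the old size comment listed "isb; movk; movz; movz; movk; movz; movz; br"). A minimal standalone sketch of that size arithmetic, assuming each movptr/mov_metadata expands to three 4-byte instructions (movz plus two movk), as the old comment implies:

// Sketch only; the real values come from HotSpot's NativeInstruction and
// MacroAssembler, not from these hand-written constants.
#include <cstdio>

int main() {
  const int insn_size    = 4; // NativeInstruction::instruction_size on AArch64
  const int mov_metadata = 3; // movz; movk; movk  -> rmethod (Metadata*)
  const int mov_target   = 3; // movz; movk; movk  -> rscratch1 (interpreter entry)
  const int branch       = 1; // br rscratch1
  const int isb          = 1; // leading isb in the old emit_static_call_stub() layout

  std::printf("old to_interp stub: %d bytes\n",
              (isb + mov_metadata + mov_target + branch) * insn_size);  // 8 instructions
  std::printf("new to_interp stub: %d bytes\n",
              (mov_metadata + mov_target + branch) * insn_size);        // 7 instructions
  return 0;
}
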
*** 158,169 ****
                    p2i(instruction_address()),
                    callee->name_and_sig_as_C_string());
    }

    // Creation also verifies the object.
!   NativeMovConstReg* method_holder
!     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
  #ifndef PRODUCT
    NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());

    // read the value once
    volatile intptr_t data = method_holder->data();
--- 157,167 ----
                    p2i(instruction_address()),
                    callee->name_and_sig_as_C_string());
    }

    // Creation also verifies the object.
!   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  #ifndef PRODUCT
    NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());

    // read the value once
    volatile intptr_t data = method_holder->data();
*** 184,195 ****
    // Reset stub.
    address stub = static_stub->addr();
    assert(stub != NULL, "stub not found");
    assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
    // Creation also verifies the object.
!   NativeMovConstReg* method_holder
!     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
    method_holder->set_data(0);
  }

  //-----------------------------------------------------------------------------
  // Non-product mode code
--- 182,192 ----
    // Reset stub.
    address stub = static_stub->addr();
    assert(stub != NULL, "stub not found");
    assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
    // Creation also verifies the object.
!   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
    method_holder->set_data(0);
  }

  //-----------------------------------------------------------------------------
  // Non-product mode code
*** 202,214 ****
    // Verify stub.
    address stub = find_stub(false /* is_aot */);
    assert(stub != NULL, "no stub found for static call");
    // Creation also verifies the object.
!   NativeMovConstReg* method_holder
!     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
!   NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

    // Verify state.
    assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
  }
--- 199,210 ----
    // Verify stub.
    address stub = find_stub(false /* is_aot */);
    assert(stub != NULL, "no stub found for static call");
    // Creation also verifies the object.
!   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
!   NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

    // Verify state.
    assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
  }
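
The three hunks above make the matching adjustment on the reader side: with the leading isb gone, the metadata-loading mov is the first instruction of the stub, so it is located with nativeMovConstReg_at(stub) instead of skipping one instruction. A hedged sketch under that assumption (method_holder_at and stub_jump_at are hypothetical helpers, not part of the patch, and rely on HotSpot's nativeInst_aarch64.hpp):

// Illustration only: how the rewritten stub is parsed, using accessors that
// already appear in this file (nativeMovConstReg_at, nativeJump_at,
// next_instruction_address). Not compilable outside HotSpot.
static NativeMovConstReg* method_holder_at(address stub) {
  // Before: nativeMovConstReg_at(stub + NativeInstruction::instruction_size)  // skip isb
  // After:  the metadata mov sits at the very start of the stub.
  return nativeMovConstReg_at(stub);
}

static NativeJump* stub_jump_at(address stub) {
  // The branch to the interpreter entry still follows the metadata mov.
  return nativeJump_at(method_holder_at(stub)->next_instruction_address());
}
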