1 /*
  2  * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "asm/macroAssembler.inline.hpp"
 27 #include "code/compiledIC.hpp"
 28 #include "code/nmethod.hpp"
 29 #include "logging/log.hpp"
 30 #include "memory/resourceArea.hpp"
 31 #include "runtime/mutexLocker.hpp"
 32 #include "runtime/safepoint.hpp"
 33 
 34 // ----------------------------------------------------------------------------
 35 
#define __ masm->
// Emit the static-call-to-interpreter stub into the code buffer's stub
// section.  The stub initially contains a "mov rmethod, 0" and a
// jump-to-self (see comment below); both are patched later when the call
// site is bound to an interpreted target (set_to_interpreted).
// Returns the stub's start address, or nullptr if the stub section could
// not be expanded.  'mark' identifies the call instruction this stub
// belongs to; if nullptr, the most recent instruction mark is used.
address CompiledDirectCall::emit_to_interp_stub(MacroAssembler *masm, address mark) {
  precond(__ code()->stubs()->start() != badAddress);
  precond(__ code()->stubs()->end() != badAddress);

  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // mov rmethod, 0
  // jmp -4 # to self

  if (mark == nullptr) {
    mark = __ inst_mark();  // Get mark within main instrs section.
  }

  address base = __ start_a_stub(to_interp_stub_size());
  int offset = __ offset();
  if (base == nullptr) {
    return nullptr;  // CodeBuffer::expand failed
  }
  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));

  {
    __ emit_static_call_stub();
  }

  // Guard against the emitted stub outgrowing the space we reserved for it.
  assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  __ end_a_stub();
  return base;
}
#undef __
 67 
 68 int CompiledDirectCall::to_interp_stub_size() {
 69   return MacroAssembler::static_call_stub_size();
 70 }
 71 
 72 int CompiledDirectCall::to_trampoline_stub_size() {
 73   // Somewhat pessimistically, we count 3 instructions here (although
 74   // there are only two) because we sometimes emit an alignment nop.
 75   // Trampoline stubs are always word aligned.
 76   return MacroAssembler::max_trampoline_stub_size();
 77 }
 78 
 79 // Relocation entries for call stub, compiled java to interpreter.
 80 int CompiledDirectCall::reloc_to_interp_stub() {
 81   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
 82 }
 83 
// Bind this direct call to an interpreted target: patch the stub so it
// loads the callee Method* and jumps to 'entry', then redirect the call
// instruction itself to the stub.  The ordering matters: the stub is
// fully patched and its icache range invalidated BEFORE the call site is
// pointed at it, so no thread can execute a half-patched stub.
void CompiledDirectCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub();
  guarantee(stub != nullptr, "stub not found");

  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);

#ifdef ASSERT
  // The stub's jump may be a near branch or a far-jump sequence depending
  // on codestub_branch_needs_far_jump(); pick the matching native view so
  // verify_mt_safe inspects the right instruction form.
  NativeJump* jump = MacroAssembler::codestub_branch_needs_far_jump()
                         ? nativeGeneralJump_at(method_holder->next_instruction_address())
                         : nativeJump_at(method_holder->next_instruction_address());
  verify_mt_safe(callee, entry, method_holder, jump);
#endif

  // Update stub.
  method_holder->set_data((intptr_t)callee());  // Method* loaded into rmethod by the stub
  MacroAssembler::pd_patch_instruction(method_holder->next_instruction_address(), entry);
  ICache::invalidate_range(stub, to_interp_stub_size());  // flush patched stub before it is reachable
  // Update jump to call.
  set_destination_mt_safe(stub);
}
106 
// Return the to-interpreter stub to its freshly-emitted "clean" state:
// zero in the method holder and an unbound jump (matching the form laid
// down by emit_to_interp_stub).
void CompiledDirectCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != nullptr, "stub not found");
  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
  method_holder->set_data(0);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
  jump->set_jump_destination((address)-1);  // -1 appears to be the "unbound / jump-to-self" sentinel handled by NativeJump — see nativeInst_aarch64
}
119 
120 //-----------------------------------------------------------------------------
121 // Non-product mode code
122 #ifndef PRODUCT
123 
// Sanity-check the call instruction, its to-interpreter stub, and the
// overall state of this direct call.  Non-product builds only.
void CompiledDirectCall::verify() {
  // Verify call.
  _call->verify();
  _call->verify_alignment();

  // Verify stub.
  address stub = find_stub();
  assert(stub != nullptr, "no stub found for static call");
  // Creation also verifies the object.
  // Note: 'jump' is intentionally unused afterwards — the nativeXxx_at()
  // factories perform the instruction-form verification as a side effect
  // of construction, which is the whole point of these two lines.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
140 
141 #endif // !PRODUCT