1 /*
  2  * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "asm/macroAssembler.inline.hpp"
 27 #include "code/compiledIC.hpp"
 28 #include "code/nmethod.hpp"
 29 #include "logging/log.hpp"
 30 #include "memory/resourceArea.hpp"
 31 #include "runtime/mutexLocker.hpp"
 32 #include "runtime/safepoint.hpp"
 33 
 34 // ----------------------------------------------------------------------------
 35 
#define __ masm->
// Emit the static call stub into the stubs section of the current
// CodeBuffer.  The stub materializes the callee Method* into rmethod and
// then jumps; both fields are patched later, when the call site is bound
// to the interpreter (see set_to_interpreted) or cleaned
// (see set_stub_to_clean).
//
// masm - assembler positioned in the CodeBuffer this stub belongs to.
// mark - address of the call instruction this stub serves, or nullptr to
//        use the most recent instruction mark in the main instrs section.
// Returns the start of the emitted stub, or nullptr if the stubs section
// could not be grown (CodeBuffer::expand failure) - callers must bail out.
address CompiledDirectCall::emit_to_interp_stub(MacroAssembler *masm, address mark) {
  precond(__ code()->stubs()->start() != badAddress);
  precond(__ code()->stubs()->end() != badAddress);

  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // mov rmethod, 0
  // jmp -4 # to self

  if (mark == nullptr) {
    mark = __ inst_mark();  // Get mark within main instrs section.
  }

  address base = __ start_a_stub(to_interp_stub_size());
  int offset = __ offset();
  if (base == nullptr) {
    return nullptr;  // CodeBuffer::expand failed
  }
  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));

  {
    __ emit_static_call_stub();
  }

  // Guard against emit_static_call_stub outgrowing the space we reserved.
  assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  __ end_a_stub();
  return base;
}
#undef __
 67 
 68 int CompiledDirectCall::to_interp_stub_size() {
 69   return MacroAssembler::max_static_call_stub_size();
 70 }
 71 
 72 int CompiledDirectCall::to_trampoline_stub_size() {
 73   // Somewhat pessimistically, we count 3 instructions here (although
 74   // there are only two) because we sometimes emit an alignment nop.
 75   // Trampoline stubs are always word aligned.
 76   return MacroAssembler::max_trampoline_stub_size();
 77 }
 78 
 79 // Relocation entries for call stub, compiled java to interpreter.
 80 int CompiledDirectCall::reloc_to_interp_stub() {
 81   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
 82 }
 83 
// Bind this direct call to the interpreter: patch the static call stub so
// it loads 'callee' into the method register and jumps to 'entry', then
// redirect the call instruction to the stub.  The patch order (data, jump,
// icache flush, then call destination) keeps the transition MT-safe: the
// call is only pointed at the stub after the stub is fully consistent.
void CompiledDirectCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub();
  guarantee(stub != nullptr, "stub not found");

  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);

  // In AOT "production" run we have mixture of AOTed and normal JITed code.
  // Static call stub in AOTed nmethod always has far jump.
  // Normal JITed nmethod may have short or far jump depending on distance.
  // Determine actual jump instruction we have in code.
  address next_instr = method_holder->next_instruction_address();
  bool is_general_jump = nativeInstruction_at(next_instr)->is_general_jump();

#ifdef ASSERT
  // Pick the matching NativeJump view purely for verification purposes.
  NativeJump* jump = is_general_jump ? nativeGeneralJump_at(next_instr) : nativeJump_at(next_instr);
  verify_mt_safe(callee, entry, method_holder, jump);
#endif

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  MacroAssembler::pd_patch_instruction(next_instr, entry);
  // Make the patched stub visible to instruction fetch before any thread
  // can be routed through it.
  ICache::invalidate_range(stub, to_interp_stub_size());
  // Update jump to call.
  set_destination_mt_safe(stub);
}
111 
// Return the static call stub to its "clean" state: a null Method* and a
// jump-to-self (destination -1), matching the shape laid down by
// emit_to_interp_stub before any binding took place.
void CompiledDirectCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != nullptr, "stub not found");
  // Caller must hold the CompiledICLocker so no thread observes a
  // half-reset stub.
  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
  method_holder->set_data(0);
  // NOTE(review): unlike set_to_interpreted, this does not distinguish a
  // general (far) jump from a short jump in AOTed stubs - presumably
  // set_jump_destination handles both encodings; confirm for AOT code.
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
  jump->set_jump_destination((address)-1);
}
124 
125 //-----------------------------------------------------------------------------
126 // Non-product mode code
127 #ifndef PRODUCT
128 
129 void CompiledDirectCall::verify() {
130   // Verify call.
131   _call->verify();
132   _call->verify_alignment();
133 
134   // Verify stub.
135   address stub = find_stub();
136   assert(stub != nullptr, "no stub found for static call");
137   // Creation also verifies the object.
138   NativeMovConstReg* method_holder
139     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
140   NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
141 
142   // Verify state.
143   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
144 }
145 
146 #endif // !PRODUCT