1 /*
  2  * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
  4  * Copyright (c) 2020, 2021, Huawei Technologies Co., Ltd. All rights reserved.
  5  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  6  *
  7  * This code is free software; you can redistribute it and/or modify it
  8  * under the terms of the GNU General Public License version 2 only, as
  9  * published by the Free Software Foundation.
 10  *
 11  * This code is distributed in the hope that it will be useful, but WITHOUT
 12  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 13  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 14  * version 2 for more details (a copy is included in the LICENSE file that
 15  * accompanied this code).
 16  *
 17  * You should have received a copy of the GNU General Public License version
 18  * 2 along with this work; if not, write to the Free Software Foundation,
 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  *
 25  */
 26 
 27 #include "precompiled.hpp"
 28 #include "asm/macroAssembler.inline.hpp"
 29 #include "code/compiledIC.hpp"
 30 #include "code/icBuffer.hpp"
 31 #include "code/nmethod.hpp"
 32 #include "memory/resourceArea.hpp"
 33 #include "runtime/mutexLocker.hpp"
 34 #include "runtime/safepoint.hpp"
 35 
 36 // ----------------------------------------------------------------------------
 37 
 38 #define __ _masm.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Emit a compiled-to-interpreter adapter stub into the stubs section of
  // 'cbuf' and return its start address, or NULL if the code buffer could
  // not be expanded.  'mark' is the address of the call instruction this
  // stub belongs to; when NULL, the code buffer's current insts mark is used.
  precond(cbuf.stubs()->start() != badAddress);
  precond(cbuf.stubs()->end() != badAddress);
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // mv xmethod, 0
  // jalr -4 # to self

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  int offset = __ offset();
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed
  }
  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));

  // The stub body: loads for xmethod and a jump, both patched later by
  // set_to_interpreted() (worst-case layout described in to_interp_stub_size()).
  __ emit_static_call_stub();

  // Guard against emit_static_call_stub() growing past the reserved size.
  assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  __ end_a_stub();
  return base;
}
 69 #undef __
 70 
 71 int CompiledStaticCall::to_interp_stub_size() {
 72   // fence_i + fence* + (lui, addi, slli, addi, slli, addi) + (lui, addi, slli, addi, slli) + jalr
 73   return NativeFenceI::instruction_size() + 12 * NativeInstruction::instruction_size;
 74 }
 75 
 76 int CompiledStaticCall::to_trampoline_stub_size() {
 77   // Somewhat pessimistically, we count 4 instructions here (although
 78   // there are only 3) because we sometimes emit an alignment nop.
 79   // Trampoline stubs are always word aligned.
 80   return NativeInstruction::instruction_size + NativeCallTrampolineStub::instruction_size;
 81 }
 82 
 83 // Relocation entries for call stub, compiled java to interpreter.
 84 int CompiledStaticCall::reloc_to_interp_stub() {
 85   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
 86 }
 87 
// Patch this static call site so it calls 'callee' through the interpreter
// entry point 'entry'.  Fills in the stub's method holder with the Method*
// and its jump with the interpreter entry, then redirects the call itself
// to the stub.  The patch order (stub first, call last) keeps the site
// consistent for concurrent executors.
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.  The movptr that holds the Method*
  // starts right after the stub's leading fence.i.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeFenceI::instruction_size());
#ifdef ASSERT
  NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());

  verify_mt_safe(callee, entry, method_holder, jump);
#endif
  // Update stub.
  method_holder->set_data((intptr_t)callee());
  NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
  // Flush the patched range before any thread can execute it.
  ICache::invalidate_range(stub, to_interp_stub_size());
  // Update jump to call.
  set_destination_mt_safe(stub);
}
114 
// Reset the to-interpreter stub identified by 'static_stub' to its clean
// state: zero out the method holder and point the jump at the sentinel
// address -1, matching the "jalr to self" pattern laid down when the stub
// was emitted.  Caller must hold the CompiledICLocker.
void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
  // Creation also verifies the object.  The movptr sits right after the
  // stub's leading fence.i.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeFenceI::instruction_size());
  method_holder->set_data(0);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
  jump->set_jump_destination((address)-1);
}
127 
128 //-----------------------------------------------------------------------------
129 // Non-product mode code
130 #ifndef PRODUCT
131 
// Debug-only sanity check of a compiled direct static call site: validates
// the call instruction, its alignment, the presence and shape of its
// to-interpreter stub, and the overall call state.
void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  _call->verify_alignment();

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder
    = nativeMovConstReg_at(stub + NativeFenceI::instruction_size());
  // 'jump' is intentionally unused: constructing it verifies the jump
  // instruction that follows the method holder.
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
148 
149 #endif // !PRODUCT