1 /*
  2  * Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2020, 2021, Huawei Technologies Co., Ltd. All rights reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "precompiled.hpp"
 27 #include "code/codeCache.hpp"
 28 #include "code/nativeInst.hpp"
 29 #include "gc/shared/barrierSetNMethod.hpp"
 30 #include "logging/log.hpp"
 31 #include "memory/resourceArea.hpp"
 32 #include "runtime/sharedRuntime.hpp"
 33 #include "runtime/registerMap.hpp"
 34 #include "runtime/thread.hpp"
 35 #include "utilities/align.hpp"
 36 #include "utilities/debug.hpp"
 37 
 38 class NativeNMethodBarrier: public NativeInstruction {
 39   address instruction_address() const { return addr_at(0); }
 40 
 41   int *guard_addr() {
 42     /* auipc + lwu + fence + lwu + beq + lui + addi + slli + addi + slli + jalr + j */
 43     return reinterpret_cast<int*>(instruction_address() + 12 * 4);
 44   }
 45 
 46 public:
 47   int get_value() {
 48     return Atomic::load_acquire(guard_addr());
 49   }
 50 
 51   void set_value(int value) {
 52     Atomic::release_store(guard_addr(), value);
 53   }
 54 
 55   void verify() const;
 56 };
 57 
// Store the instruction bitmask, bits and name for checking the barrier.
// verify() accepts an instruction word when (word & mask) == bits; name is
// used only in the fatal() message when the check fails.
struct CheckInsn {
  uint32_t mask;
  uint32_t bits;
  const char *name;
};
 64 
// Expected encodings of the nmethod entry barrier, one entry per instruction.
// Each mask selects only the opcode/function bits that must match; register
// numbers and immediates left outside the mask are ignored by verify().
static const struct CheckInsn barrierInsn[] = {
  { 0x00000fff, 0x00000297, "auipc  t0, 0           "},
  { 0x000fffff, 0x0002e283, "lwu    t0, 48(t0)      "},
  { 0xffffffff, 0x0aa0000f, "fence  ir, ir          "},
  { 0x000fffff, 0x000be303, "lwu    t1, 112(xthread)"},
  { 0x01fff07f, 0x00628063, "beq    t0, t1, skip    "},
  { 0x00000fff, 0x000002b7, "lui    t0, imm0        "},
  { 0x000fffff, 0x00028293, "addi   t0, t0, imm1    "},
  { 0xffffffff, 0x00b29293, "slli   t0, t0, 11      "},
  { 0x000fffff, 0x00028293, "addi   t0, t0, imm2    "},
  { 0xffffffff, 0x00529293, "slli   t0, t0, 5       "},
  { 0x000fffff, 0x000280e7, "jalr   lr, imm3(t0)    "},
  { 0x00000fff, 0x0000006f, "j      skip            "}
  /* guard: */
  /* 32bit nmethod guard value */
  /* skip: */
};
 82 
 83 // The encodings must match the instructions emitted by
 84 // BarrierSetAssembler::nmethod_entry_barrier. The matching ignores the specific
 85 // register numbers and immediate values in the encoding.
 86 void NativeNMethodBarrier::verify() const {
 87   intptr_t addr = (intptr_t) instruction_address();
 88   for(unsigned int i = 0; i < sizeof(barrierInsn)/sizeof(struct CheckInsn); i++ ) {
 89     uint32_t inst = *((uint32_t*) addr);
 90     if ((inst & barrierInsn[i].mask) != barrierInsn[i].bits) {
 91       tty->print_cr("Addr: " INTPTR_FORMAT " Code: 0x%x", addr, inst);
 92       fatal("not an %s instruction.", barrierInsn[i].name);
 93     }
 94     addr += 4;
 95   }
 96 }
 97 
 98 
 99 /* We're called from an nmethod when we need to deoptimize it. We do
100    this by throwing away the nmethod's frame and jumping to the
101    ic_miss stub. This looks like there has been an IC miss at the
102    entry of the nmethod, so we resolve the call, which will fall back
103    to the interpreter if the nmethod has been unloaded. */
void BarrierSetNMethod::deoptimize(nmethod* nm, address* return_address_ptr) {

  // Layout of the register spill area the barrier stub reserved on the stack;
  // we overwrite it below so the stub "returns" into the ic_miss path with the
  // caller's frame restored.
  typedef struct {
    intptr_t *sp; intptr_t *fp; address lr; address pc;
  } frame_pointers_t;

  // NOTE(review): the stub appears to reserve 5 slots below return_address_ptr,
  // the first 4 of which form frame_pointers_t — assumes the stub's stack
  // layout matches this offset; confirm against
  // BarrierSetAssembler::nmethod_entry_barrier's slow path.
  frame_pointers_t *new_frame = (frame_pointers_t *)(return_address_ptr - 5);

  JavaThread *thread = JavaThread::current();
  RegisterMap reg_map(thread, false);
  frame frame = thread->last_frame();

  // The top frame must be the nmethod whose barrier fired; step to its caller,
  // whose state we restore into new_frame.
  assert(frame.is_compiled_frame() || frame.is_native_frame(), "must be");
  assert(frame.cb() == nm, "must be");
  frame = frame.sender(&reg_map);

  LogTarget(Trace, nmethod, barrier) out;
  if (out.is_enabled()) {
    ResourceMark mark;
    log_trace(nmethod, barrier)("deoptimize(nmethod: %s(%p), return_addr: %p, osr: %d, thread: %p(%s), making rsp: %p) -> %p",
                                nm->method()->name_and_sig_as_C_string(),
                                nm, *(address *) return_address_ptr, nm->is_osr_method(), thread,
                                thread->name(), frame.sp(), nm->verified_entry_point());
  }

  // Discard the nmethod's frame: restore the caller's sp/fp, make the saved
  // return address the caller's pc, and resume at the wrong-method stub so the
  // call is re-resolved (falling back to the interpreter if nm was unloaded).
  new_frame->sp = frame.sp();
  new_frame->fp = frame.fp();
  new_frame->lr = frame.pc();
  new_frame->pc = SharedRuntime::get_handle_wrong_method_stub();
}
134 
// This is the offset of the entry barrier from where the frame is completed.
// If any code changes between the end of the verified entry where the entry
// barrier resides, and the completion of the frame, then
// NativeNMethodCmpBarrier::verify() will immediately complain when it does
// not find the expected native instruction at this offset, which needs updating.
// Note that this offset is invariant of PreserveFramePointer.

// see BarrierSetAssembler::nmethod_entry_barrier
// auipc + lwu + fence + lwu + beq + movptr_with_offset(5 instructions) + jalr + j + int32
// = 12 instructions plus the 4-byte guard word = 13 * 4 bytes before frame completion.
static const int entry_barrier_offset = -4 * 13;
145 
146 static NativeNMethodBarrier* native_nmethod_barrier(nmethod* nm) {
147   address barrier_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset;
148   NativeNMethodBarrier* barrier = reinterpret_cast<NativeNMethodBarrier*>(barrier_address);
149   debug_only(barrier->verify());
150   return barrier;
151 }
152 
153 void BarrierSetNMethod::disarm(nmethod* nm) {
154   if (!supports_entry_barrier(nm)) {
155     return;
156   }
157 
158   // Disarms the nmethod guard emitted by BarrierSetAssembler::nmethod_entry_barrier.
159   NativeNMethodBarrier* barrier = native_nmethod_barrier(nm);
160 
161   barrier->set_value(disarmed_value());
162 }
163 
164 bool BarrierSetNMethod::is_armed(nmethod* nm) {
165   if (!supports_entry_barrier(nm)) {
166     return false;
167   }
168 
169   NativeNMethodBarrier* barrier = native_nmethod_barrier(nm);
170   return barrier->get_value() != disarmed_value();
171 }