1 /*
  2  * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2012, 2020 SAP SE. All rights reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "precompiled.hpp"
 27 #include "asm/macroAssembler.inline.hpp"
 28 #include "code/compiledIC.hpp"
 29 #include "memory/resourceArea.hpp"
 30 #include "nativeInst_ppc.hpp"
 31 #include "oops/compressedOops.inline.hpp"
 32 #include "oops/oop.hpp"
 33 #include "runtime/handles.hpp"
 34 #include "runtime/orderAccess.hpp"
 35 #include "runtime/safepoint.hpp"
 36 #include "runtime/sharedRuntime.hpp"
 37 #include "runtime/stubRoutines.hpp"
 38 #include "utilities/ostream.hpp"
 39 #ifdef COMPILER1
 40 #include "c1/c1_Runtime1.hpp"
 41 #endif
 42 
 43 // We use an illtrap for marking a method as not_entrant
 44 // Work around a C++ compiler bug which changes 'this'
 45 bool NativeInstruction::is_sigill_not_entrant_at(address addr) {
 46   if (!Assembler::is_illtrap(addr)) return false;
 47   CodeBlob* cb = CodeCache::find_blob(addr);
 48   if (cb == nullptr || !cb->is_nmethod()) return false;
 49   nmethod *nm = (nmethod *)cb;
 50   // This method is not_entrant iff the illtrap instruction is
 51   // located at the verified entry point.
 52   return nm->verified_entry_point() == addr;
 53 }
 54 
 55 #ifdef ASSERT
 56 void NativeInstruction::verify() {
 57   // Make sure code pattern is actually an instruction address.
 58   address addr = addr_at(0);
 59   if (addr == 0 || ((intptr_t)addr & 3) != 0) {
 60     fatal("not an instruction address");
 61   }
 62 }
 63 #endif // ASSERT
 64 
 65 // Extract call destination from a NativeCall. The call might use a trampoline stub.
// Returns the effective call target: either the direct branch target,
// or, if the branch goes through a trampoline stub, the destination
// stored in the stub's constant-table slot.
address NativeCall::destination() const {
  address addr = (address)this;
  address destination = Assembler::bxx_destination(addr);

  // Do we use a trampoline stub for this call?
  // Trampoline stubs are located behind the main code.
  if (destination > addr) {
    // Filter out recursive method invocation (call to verified/unverified entry point).
    CodeBlob* cb = CodeCache::find_blob(addr);
    assert(cb && cb->is_nmethod(), "sanity");
    nmethod *nm = (nmethod *)cb;
    if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
      // Yes we do, so get the destination from the trampoline stub.
      const address trampoline_stub_addr = destination;
      destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination(nm);
    }
  }

  return destination;
}
 86 
 87 // Similar to replace_mt_safe, but just changes the destination. The
 88 // important thing is that free-running threads are able to execute this
 89 // call instruction at all times. Thus, the displacement field must be
 90 // instruction-word-aligned.
 91 //
 92 // Used in the runtime linkage of calls; see class CompiledIC.
 93 //
 94 // Add parameter assert_lock to switch off assertion
 95 // during code generation, where no lock is needed.
 96 void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
 97   assert(!assert_lock ||
 98          (CodeCache_lock->is_locked() || SafepointSynchronize::is_at_safepoint()) ||
 99          CompiledICLocker::is_safe(addr_at(0)),
100          "concurrent code patching");
101 
102   ResourceMark rm;
103   int code_size = 1 * BytesPerInstWord;
104   address addr_call = addr_at(0);
105   assert(MacroAssembler::is_bl(*(int*)addr_call), "unexpected code at call-site");
106 
107   CodeBuffer cb(addr_call, code_size + 1);
108   MacroAssembler* a = new MacroAssembler(&cb);
109 
110   // Patch the call.
111   if (!ReoptimizeCallSequences || !a->is_within_range_of_b(dest, addr_call)) {
112     address trampoline_stub_addr = get_trampoline();
113 
114     // We did not find a trampoline stub because the current codeblob
115     // does not provide this information. The branch will be patched
116     // later during a final fixup, when all necessary information is
117     // available.
118     if (trampoline_stub_addr == 0)
119       return;
120 
121     // Patch the constant in the call's trampoline stub.
122     NativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);
123     dest = trampoline_stub_addr;
124   }
125 
126   OrderAccess::release();
127   a->bl(dest);
128 
129   ICache::ppc64_flush_icache_bytes(addr_call, code_size);
130 }
131 
// Locate the trampoline stub associated with this call site.
// Returns nullptr when the containing blob cannot provide the
// information (e.g. no relocations yet).
address NativeCall::get_trampoline() {
  address call_addr = addr_at(0);

  CodeBlob *code = CodeCache::find_blob(call_addr);
  assert(code != nullptr, "Could not find the containing code blob");

  // There are no relocations available when the code gets relocated
  // because of CodeBuffer expansion.
  if (code->relocation_size() == 0)
    return nullptr;

  // Fast path: the bl may already branch directly to a trampoline stub
  // inside the same blob.
  address bl_destination = Assembler::bxx_destination(call_addr);
  if (code->contains(bl_destination) &&
      is_NativeCallTrampolineStub_at(bl_destination))
    return bl_destination;

  // If the codeBlob is not a nmethod, this is because we get here from the
  // CodeBlob constructor, which is called within the nmethod constructor.
  return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
}
152 
153 #ifdef ASSERT
154 void NativeCall::verify() {
155   address addr = addr_at(0);
156 
157   if (!NativeCall::is_call_at(addr)) {
158     tty->print_cr("not a NativeCall at " PTR_FORMAT, p2i(addr));
159     // TODO: PPC port: Disassembler::decode(addr - 20, addr + 20, tty);
160     fatal("not a NativeCall at " PTR_FORMAT, p2i(addr));
161   }
162 }
163 #endif // ASSERT
164 
165 #ifdef ASSERT
166 void NativeFarCall::verify() {
167   address addr = addr_at(0);
168 
169   NativeInstruction::verify();
170   if (!NativeFarCall::is_far_call_at(addr)) {
171     tty->print_cr("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
172     // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
173     fatal("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
174   }
175 }
176 #endif // ASSERT
177 
// Address of the instruction following this load-constant sequence.
// The sequence length depends on which load-constant variant was emitted.
address NativeMovConstReg::next_instruction_address() const {
#ifdef ASSERT
  CodeBlob* nm = CodeCache::find_blob(instruction_address());
  assert(nm != nullptr, "Could not find code blob");
  // Narrow-oop sequences are handled via set_narrow_oop(), not here.
  assert(!MacroAssembler::is_set_narrow_oop(addr_at(0), nm->content_begin()), "Should not patch narrow oop here");
#endif

  if (MacroAssembler::is_load_const_from_method_toc_at(addr_at(0))) {
    return addr_at(load_const_from_method_toc_instruction_size);
  } else {
    return addr_at(load_const_instruction_size);
  }
}
191 
// Extract the constant materialized by this instruction sequence.
// Recognizes, in order: a plain load_const, a narrow-oop set, a load
// from the method TOC, and a global-TOC-relative address calculation.
intptr_t NativeMovConstReg::data() const {
  address   addr = addr_at(0);

  if (MacroAssembler::is_load_const_at(addr)) {
    return MacroAssembler::get_const(addr);
  }

  CodeBlob* cb = CodeCache::find_blob(addr);
  assert(cb != nullptr, "Could not find code blob");
  if (MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) {
    narrowOop no = MacroAssembler::get_narrow_oop(addr, cb->content_begin());
    // We can reach here during GC with 'no' pointing to new object location
    // while 'heap()->is_in' still reports false (e.g. with SerialGC).
    // Therefore we use raw decoding.
    if (CompressedOops::is_null(no)) return 0;
    return cast_from_oop<intptr_t>(CompressedOops::decode_raw(no));
  } else if (MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // The constant lives in the blob's constant table (ctable); the
    // instruction encodes only its offset.
    address ctable = cb->content_begin();
    int offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    return *(intptr_t *)(ctable + offset);
  } else {
    // Last accepted pattern: an address computed relative to the global TOC.
    assert(MacroAssembler::is_calculate_address_from_global_toc_at(addr, addr - BytesPerInstWord),
           "must be calculate_address_from_global_toc");
    return (intptr_t) MacroAssembler::get_address_of_calculate_address_from_global_toc_at(addr, addr - BytesPerInstWord);
  }
}
218 
// Store 'data' into the instruction stream (or the constant table the
// instructions read from) without updating any relocation records.
// Returns the address following the patched sequence, for use by
// set_data() when scanning relocations. Not MT-safe in the load_const
// case (see comment below).
address NativeMovConstReg::set_data_plain(intptr_t data, CodeBlob *cb) {
  address addr         = instruction_address();
  address next_address = nullptr;
  if (!cb) cb = CodeCache::find_blob(addr);

  if (cb != nullptr && MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // A load from the method's TOC (ctable).
    // Only the ctable slot is written; the instructions are unchanged,
    // so no icache flush is needed.
    assert(cb->is_nmethod(), "must be nmethod");
    const address ctable = cb->content_begin();
    const int toc_offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    *(intptr_t *)(ctable + toc_offset) = data;
    next_address = addr + BytesPerInstWord;
  } else if (cb != nullptr &&
             MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) {
    // A calculation relative to the global TOC.
    if (MacroAssembler::get_address_of_calculate_address_from_global_toc_at(addr, cb->content_begin()) !=
        (address)data) {
      const address inst2_addr = addr;
      // Patching may touch an earlier instruction of the sequence as
      // well; flush the whole patched range.
      const address inst1_addr =
        MacroAssembler::patch_calculate_address_from_global_toc_at(inst2_addr, cb->content_begin(),
                                                                   (address)data);
      assert(inst1_addr != nullptr && inst1_addr < inst2_addr, "first instruction must be found");
      const int range = inst2_addr - inst1_addr + BytesPerInstWord;
      ICache::ppc64_flush_icache_bytes(inst1_addr, range);
    }
    next_address = addr + 1 * BytesPerInstWord;
  } else if (MacroAssembler::is_load_const_at(addr)) {
    // A normal 5 instruction load_const code sequence.
    if (MacroAssembler::get_const(addr) != (long)data) {
      // This is not mt safe, ok in methods like CodeBuffer::copy_code().
      MacroAssembler::patch_const(addr, (long)data);
      ICache::ppc64_flush_icache_bytes(addr, load_const_instruction_size);
    }
    next_address = addr + 5 * BytesPerInstWord;
  } else if (MacroAssembler::is_bl(* (int*) addr)) {
    // A single branch-and-link instruction.
    ResourceMark rm;
    const int code_size = 1 * BytesPerInstWord;
    CodeBuffer cb(addr, code_size + 1);
    MacroAssembler* a = new MacroAssembler(&cb);
    a->bl((address) data);
    ICache::ppc64_flush_icache_bytes(addr, code_size);
    next_address = addr + code_size;
  } else {
    ShouldNotReachHere();
  }

  return next_address;
}
268 
// Store 'data' into the instruction stream and keep any associated
// oop/metadata relocation cells in sync, so GC sees the new value.
void NativeMovConstReg::set_data(intptr_t data) {
  // Store the value into the instruction stream.
  CodeBlob *cb = CodeCache::find_blob(instruction_address());
  address next_address = set_data_plain(data, cb);

  // Also store the value into an oop_Relocation cell, if any.
  if (cb && cb->is_nmethod()) {
    // Only scan relocations covering this one instruction sequence.
    RelocIterator iter((nmethod *) cb, instruction_address(), next_address);
    oop* oop_addr = nullptr;
    Metadata** metadata_addr = nullptr;
    while (iter.next()) {
      if (iter.type() == relocInfo::oop_type) {
        oop_Relocation *r = iter.oop_reloc();
        if (oop_addr == nullptr) {
          oop_addr = r->oop_addr();
          *oop_addr = cast_to_oop(data);
        } else {
          // All oop relocations in this range must share the same cell.
          assert(oop_addr == r->oop_addr(), "must be only one set-oop here");
        }
      }
      if (iter.type() == relocInfo::metadata_type) {
        metadata_Relocation *r = iter.metadata_reloc();
        if (metadata_addr == nullptr) {
          metadata_addr = r->metadata_addr();
          *metadata_addr = (Metadata*)data;
        } else {
          assert(metadata_addr == r->metadata_addr(), "must be only one set-metadata here");
        }
      }
    }
  }
}
301 
// Patch the narrow-oop immediate encoded in this two-instruction
// sequence. No-op if the encoded value already matches 'data'.
void NativeMovConstReg::set_narrow_oop(narrowOop data, CodeBlob *code /* = nullptr */) {
  address   inst2_addr = addr_at(0);
  CodeBlob* cb = (code) ? code : CodeCache::find_blob(instruction_address());
  assert(cb != nullptr, "Could not find code blob");
  if (MacroAssembler::get_narrow_oop(inst2_addr, cb->content_begin()) == data) {
    return;
  }
  // Patching also rewrites the first instruction of the sequence;
  // flush the icache for the whole patched range.
  const address inst1_addr =
    MacroAssembler::patch_set_narrow_oop(inst2_addr, cb->content_begin(), data);
  assert(inst1_addr != nullptr && inst1_addr < inst2_addr, "first instruction must be found");
  const int range = inst2_addr - inst1_addr + BytesPerInstWord;
  ICache::ppc64_flush_icache_bytes(inst1_addr, range);
}
315 
316 // Do not use an assertion here. Let clients decide whether they only
317 // want this when assertions are enabled.
318 #ifdef ASSERT
319 void NativeMovConstReg::verify() {
320   address   addr = addr_at(0);
321   if (! MacroAssembler::is_load_const_at(addr) &&
322       ! MacroAssembler::is_load_const_from_method_toc_at(addr)) {
323     CodeBlob* cb = CodeCache::find_blob(addr);
324     if (! (cb != nullptr && MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) &&
325         ! (cb != nullptr && MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) &&
326         ! MacroAssembler::is_bl(*((int*) addr))) {
327       tty->print_cr("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
328       // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
329       fatal("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
330     }
331   }
332 }
333 #endif // ASSERT
334 
// Atomically overwrite the instruction at 'verified_entry' so the
// method is no longer entered there: either a direct branch to 'dest',
// or an illtrap whose SIGILL handler continues at 'dest'.
void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  CodeBuffer cb(verified_entry, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
#ifdef COMPILER2
  assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
#endif
  // Patch this nmethod atomically. Always use illtrap/trap in debug build.
  if (DEBUG_ONLY(false &&) a->is_within_range_of_b(dest, a->pc())) {
    a->b(dest);
  } else {
    // The signal handler will continue at dest=OptoRuntime::handle_wrong_method_stub().
    // We use an illtrap for marking a method as not_entrant.
    a->illtrap();
  }
  // A single-word store is atomic; flush so other CPUs see the patch.
  ICache::ppc64_flush_icache_bytes(verified_entry, code_size);
}
353 
354 #ifdef ASSERT
355 void NativeJump::verify() {
356   address addr = addr_at(0);
357 
358   NativeInstruction::verify();
359   if (!NativeJump::is_jump_at(addr)) {
360     tty->print_cr("not a NativeJump at " PTR_FORMAT, p2i(addr));
361     // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
362     fatal("not a NativeJump at " PTR_FORMAT, p2i(addr));
363   }
364 }
365 #endif // ASSERT
366 
367 
368 void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
369   CodeBuffer cb(code_pos, BytesPerInstWord + 1);
370   MacroAssembler a(&cb);
371   a.b(entry);
372   ICache::ppc64_flush_icache_bytes(code_pos, NativeGeneralJump::instruction_size);
373 }
374 
375 // MT-safe patching of a jmp instruction.
// MT-safe replacement of the jump instruction at 'instr_addr' with the
// first instruction word of 'code_buffer'. Relies on the single-word
// store being atomic on PPC.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
  // Bytes beyond offset NativeGeneralJump::instruction_size are copied by caller.

  // Finally patch out the jump.
  volatile juint *jump_addr = (volatile juint*)instr_addr;
  // Release not needed because caller uses invalidate_range after copying the remaining bytes.
  //Atomic::release_store(jump_addr, *((juint*)code_buffer));
  *jump_addr = *((juint*)code_buffer); // atomically store code over branch instruction
  ICache::ppc64_flush_icache_bytes(instr_addr, NativeGeneralJump::instruction_size);
}
386 
387 
388 //-------------------------------------------------------------------
389 
390 // Call trampoline stubs.
391 //
392 // Layout and instructions of a call trampoline stub:
393 //    0:  load the TOC (part 1)
394 //    4:  load the TOC (part 2)
395 //    8:  load the call target from the constant pool (part 1)
396 //  [12:  load the call target from the constant pool (part 2, optional)]
397 //   ..:  branch via CTR
398 //
399 
400 address NativeCallTrampolineStub::encoded_destination_addr() const {
401   address instruction_addr = addr_at(0 * BytesPerInstWord);
402   if (!MacroAssembler::is_ld_largeoffset(instruction_addr)) {
403     instruction_addr = addr_at(2 * BytesPerInstWord);
404     assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
405            "must be a ld with large offset (from the constant pool)");
406   }
407   return instruction_addr;
408 }
409 
410 address NativeCallTrampolineStub::destination(nmethod *nm) const {
411   CodeBlob* cb = nm ? nm : CodeCache::find_blob(addr_at(0));
412   assert(cb != nullptr, "Could not find code blob");
413   address ctable = cb->content_begin();
414 
415   return *(address*)(ctable + destination_toc_offset());
416 }
417 
418 int NativeCallTrampolineStub::destination_toc_offset() const {
419   return MacroAssembler::get_ld_largeoffset_offset(encoded_destination_addr());
420 }
421 
422 void NativeCallTrampolineStub::set_destination(address new_destination) {
423   CodeBlob* cb = CodeCache::find_blob(addr_at(0));
424   assert(cb != nullptr, "Could not find code blob");
425   address ctable = cb->content_begin();
426 
427   *(address*)(ctable + destination_toc_offset()) = new_destination;
428 }
429 
430 void NativePostCallNop::make_deopt() {
431   NativeDeoptInstruction::insert(addr_at(0));
432 }
433 
434 bool NativePostCallNop::patch(int32_t oopmap_slot, int32_t cb_offset) {
435   int32_t i2, i1;
436   assert(is_aligned(cb_offset, 4), "cb offset alignment does not match instruction alignment");
437   assert(!decode(i1, i2), "already patched");
438 
439   cb_offset = cb_offset >> 2;
440   if (((oopmap_slot & ppc_oopmap_slot_mask) != oopmap_slot) || ((cb_offset & ppc_cb_offset_mask) != cb_offset)) {
441     return false;  // cannot encode
442   }
443   const uint32_t data = oopmap_slot << ppc_cb_offset_bits | cb_offset;
444   const uint32_t lo_data = data & ppc_data_lo_mask;
445   const uint32_t hi_data = data >> ppc_data_lo_bits;
446   const uint32_t nineth_bit = 1 << (31 - 9);
447   uint32_t instr = Assembler::CMPLI_OPCODE | hi_data << ppc_data_hi_shift | nineth_bit | lo_data;
448   *(uint32_t*)addr_at(0) = instr;
449 
450   int32_t oopmap_slot_dec, cb_offset_dec;
451   assert(is_post_call_nop(), "pcn not recognized");
452   assert(decode(oopmap_slot_dec, cb_offset_dec), "encoding failed");
453   assert(oopmap_slot == oopmap_slot_dec, "oopmap slot encoding is wrong");
454   assert((cb_offset << 2) == cb_offset_dec, "cb offset encoding is wrong");
455 
456   return true;  // encoding succeeded
457 }
458 
void NativeDeoptInstruction::verify() {
  // Intentionally empty: no additional pattern check is performed here.
}
461 
462 bool NativeDeoptInstruction::is_deopt_at(address code_pos) {
463   if (!Assembler::is_illtrap(code_pos)) return false;
464   CodeBlob* cb = CodeCache::find_blob(code_pos);
465   if (cb == nullptr || !cb->is_nmethod()) return false;
466   nmethod *nm = (nmethod *)cb;
467   // see NativeInstruction::is_sigill_not_entrant_at()
468   return nm->verified_entry_point() != code_pos;
469 }
470 
471 // Inserts an instruction which is specified to cause a SIGILL at a given pc
472 void NativeDeoptInstruction::insert(address code_pos) {
473   ResourceMark rm;
474   int code_size = 1 * BytesPerInstWord;
475   CodeBuffer cb(code_pos, code_size + 1);
476   MacroAssembler* a = new MacroAssembler(&cb);
477   a->illtrap();
478   ICache::ppc64_flush_icache_bytes(code_pos, code_size);
479 }