1 /*
  2  * Copyright (c) 2002, 2021, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2012, 2021 SAP SE. All rights reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #ifndef CPU_PPC_NATIVEINST_PPC_HPP
 27 #define CPU_PPC_NATIVEINST_PPC_HPP
 28 
 29 #include "asm/macroAssembler.hpp"
 30 #include "runtime/icache.hpp"
 31 #include "runtime/os.hpp"
 32 #include "runtime/safepointMechanism.hpp"
 33 #ifdef COMPILER2
 34 #include "opto/c2_globals.hpp"
 35 #endif
 36 
 37 // We have interfaces for the following instructions:
 38 //
 39 // - NativeInstruction
 40 //   - NativeCall
 41 //   - NativeFarCall
 42 //   - NativeMovConstReg
 43 //   - NativeJump
 44 //   - NativeIllegalInstruction
 45 //   - NativeConditionalFarBranch
 46 //   - NativeCallTrampolineStub
 47 
// The base class for different kinds of native instruction abstractions.
// It provides the primitive operations to manipulate code relative to this.
//
// All predicates below decode the 4-byte instruction word located at this
// object's address (long_at(0)); none of them inspect surrounding
// instructions unless stated otherwise.
class NativeInstruction {
  friend class Relocation;

 public:
  // Is this an unconditional branch ('b')?
  bool is_jump() { return Assembler::is_b(long_at(0)); } // See NativeGeneralJump.

  // Trap instruction used for the SIGTRAP-based inline-cache miss check.
  bool is_sigtrap_ic_miss_check() {
    assert(UseSIGTRAP, "precondition");
    return MacroAssembler::is_trap_ic_miss_check(long_at(0));
  }

  // Trap instruction used for SIGTRAP-based implicit null checks.
  bool is_sigtrap_null_check() {
    assert(UseSIGTRAP && TrapBasedNullChecks, "precondition");
    return MacroAssembler::is_trap_null_check(long_at(0));
  }

  // Extracts the si16 field of an unconditional 'tdi' at this address
  // (the field encodes a stop type; see MacroAssembler::tdi_get_si16).
  int get_stop_type() {
    return MacroAssembler::tdi_get_si16(long_at(0), Assembler::traptoUnconditional, 0);
  }

  // We use an illtrap for marking a method as not_entrant or zombie.
  bool is_sigill_zombie_not_entrant() {
    // Work around a C++ compiler bug which changes 'this'.
    return NativeInstruction::is_sigill_zombie_not_entrant_at(addr_at(0));
  }
  static bool is_sigill_zombie_not_entrant_at(address addr);

#ifdef COMPILER2
  // SIGTRAP-based implicit range checks
  bool is_sigtrap_range_check() {
    assert(UseSIGTRAP && TrapBasedRangeChecks, "precondition");
    return MacroAssembler::is_trap_range_check(long_at(0));
  }
#endif

  // Is this instruction a safepoint poll: either a conditional trap ('tdi')
  // on the poll bit, or a load from the polling page?
  bool is_safepoint_poll() {
    // The current arguments of the instruction are not checked!
    if (USE_POLL_BIT_ONLY) {
      int encoding = SafepointMechanism::poll_bit();
      return MacroAssembler::is_tdi(long_at(0), Assembler::traptoGreaterThanUnsigned | Assembler::traptoEqual,
                                    -1, encoding);
    }
    return MacroAssembler::is_load_from_polling_page(long_at(0), NULL);
  }

  // Is this the conditional trap comparing R1_SP against the stack
  // watermark used for the poll at nmethod return?
  bool is_safepoint_poll_return() {
    // Safepoint poll at nmethod return with watermark check.
    return MacroAssembler::is_td(long_at(0), Assembler::traptoGreaterThanUnsigned,
                                 /* R1_SP */ 1, /* any reg */ -1);
  }

  address get_stack_bang_address(void *ucontext) {
    // If long_at(0) is not a stack bang, return 0. Otherwise, return
    // banged address.
    return MacroAssembler::get_stack_bang_address(long_at(0), ucontext);
  }

 protected:
  // Raw access relative to 'this': byte address resp. 4-byte instruction
  // word at the given byte offset.
  address  addr_at(int offset) const    { return address(this) + offset; }
  int      long_at(int offset) const    { return *(int*)addr_at(offset); }

 public:
  void verify() NOT_DEBUG_RETURN;
};
114 
115 inline NativeInstruction* nativeInstruction_at(address address) {
116   NativeInstruction* inst = (NativeInstruction*)address;
117   inst->verify();
118   return inst;
119 }
120 
121 // The NativeCall is an abstraction for accessing/manipulating call
122 // instructions. It is used to manipulate inline caches, primitive &
123 // dll calls, etc.
124 //
125 // Sparc distinguishes `NativeCall' and `NativeFarCall'. On PPC64,
126 // at present, we provide a single class `NativeCall' representing the
127 // sequence `load_const, mtctr, bctrl' or the sequence 'ld_from_toc,
128 // mtctr, bctrl'.
class NativeCall: public NativeInstruction {
 public:

  // Byte sizes of the supported call sequences (see class comment above).
  enum ppc_specific_constants {
    load_const_instruction_size                 = 28,
    load_const_from_method_toc_instruction_size = 16,
    instruction_size                            = 16 // Used in shared code for calls with reloc_info.
  };

  // True if the instruction word at 'a' is a 'bl' (branch and link).
  static bool is_call_at(address a) {
    return Assembler::is_bl(*(int*)(a));
  }

  // True if 'return_address' directly follows a 'bl', i.e. it is the
  // return address of a call.
  static bool is_call_before(address return_address) {
    return NativeCall::is_call_at(return_address - 4);
  }

  address instruction_address() const {
    return addr_at(0);
  }

  // Address of the instruction following the call ('bl' is 4 bytes).
  address next_instruction_address() const {
    // We have only bl.
    assert(MacroAssembler::is_bl(*(int*)instruction_address()), "Should be bl instruction!");
    return addr_at(4);
  }

  // The return address of the call is the instruction after the 'bl'.
  address return_address() const {
    return next_instruction_address();
  }

  // Destination of this call (defined in nativeInst_ppc.cpp).
  address destination() const;

  // The parameter assert_lock disables the assertion during code generation.
  void set_destination_mt_safe(address dest, bool assert_lock = true);

  // Trampoline stub for this call (defined in nativeInst_ppc.cpp).
  address get_trampoline();

  void verify_alignment() {} // do nothing on ppc
  void verify() NOT_DEBUG_RETURN;
};
170 
171 inline NativeCall* nativeCall_at(address instr) {
172   NativeCall* call = (NativeCall*)instr;
173   call->verify();
174   return call;
175 }
176 
177 inline NativeCall* nativeCall_before(address return_address) {
178   NativeCall* call = NULL;
179   if (MacroAssembler::is_bl(*(int*)(return_address - 4)))
180     call = (NativeCall*)(return_address - 4);
181   call->verify();
182   return call;
183 }
184 
185 // The NativeFarCall is an abstraction for accessing/manipulating native
186 // call-anywhere instructions.
187 // Used to call native methods which may be loaded anywhere in the address
188 // space, possibly out of reach of a call instruction.
189 class NativeFarCall: public NativeInstruction {
190  public:
191   // We use MacroAssembler::bl64_patchable() for implementing a
192   // call-anywhere instruction.
193 
194   // Checks whether instr points at a NativeFarCall instruction.
195   static bool is_far_call_at(address instr) {
196     return MacroAssembler::is_bl64_patchable_at(instr);
197   }
198 
199   // Does the NativeFarCall implementation use a pc-relative encoding
200   // of the call destination?
201   // Used when relocating code.
202   bool is_pcrelative() {
203     assert(MacroAssembler::is_bl64_patchable_at((address)this),
204            "unexpected call type");
205     return MacroAssembler::is_bl64_patchable_pcrelative_at((address)this);
206   }
207 
208   // Returns the NativeFarCall's destination.
209   address destination() const {
210     assert(MacroAssembler::is_bl64_patchable_at((address)this),
211            "unexpected call type");
212     return MacroAssembler::get_dest_of_bl64_patchable_at((address)this);
213   }
214 
215   // Sets the NativeCall's destination, not necessarily mt-safe.
216   // Used when relocating code.
217   void set_destination(address dest) {
218     // Set new destination (implementation of call may change here).
219     assert(MacroAssembler::is_bl64_patchable_at((address)this),
220            "unexpected call type");
221     MacroAssembler::set_dest_of_bl64_patchable_at((address)this, dest);
222   }
223 
224   void verify() NOT_DEBUG_RETURN;
225 };
226 
227 // Instantiates a NativeFarCall object starting at the given instruction
228 // address and returns the NativeFarCall object.
229 inline NativeFarCall* nativeFarCall_at(address instr) {
230   NativeFarCall* call = (NativeFarCall*)instr;
231   call->verify();
232   return call;
233 }
234 
235 // An interface for accessing/manipulating native set_oop imm, reg instructions
236 // (used to manipulate inlined data references, etc.).
class NativeMovConstReg: public NativeInstruction {
 public:

  // Byte sizes of the supported load-constant sequences.
  enum ppc_specific_constants {
    load_const_instruction_size                 = 20,
    load_const_from_method_toc_instruction_size =  8,
    instruction_size                            =  8 // Used in shared code for calls with reloc_info.
  };

  address instruction_address() const {
    return addr_at(0);
  }

  // Address of the instruction following the load-constant sequence;
  // depends on which encoding is used (defined in nativeInst_ppc.cpp).
  address next_instruction_address() const;

  // (The [set_]data accessor respects oop_type relocs also.)
  intptr_t data() const;

  // Patch the code stream.
  address set_data_plain(intptr_t x, CodeBlob *code);
  // Patch the code stream and oop pool.
  void set_data(intptr_t x);

  // Patch narrow oop constants. Use this also for narrow klass.
  void set_narrow_oop(narrowOop data, CodeBlob *code = NULL);

  void verify() NOT_DEBUG_RETURN;
};
265 
266 inline NativeMovConstReg* nativeMovConstReg_at(address address) {
267   NativeMovConstReg* test = (NativeMovConstReg*)address;
268   test->verify();
269   return test;
270 }
271 
272 // The NativeJump is an abstraction for accessing/manipulating native
273 // jump-anywhere instructions.
class NativeJump: public NativeInstruction {
 public:
  // We use MacroAssembler::b64_patchable() for implementing a
  // jump-anywhere instruction.

  enum ppc_specific_constants {
    instruction_size = MacroAssembler::b64_patchable_size
  };

  // Checks whether instr points at a NativeJump instruction.
  // Two encodings are recognized: a b64_patchable sequence, or a
  // load-const-from-method-TOC followed by 'mtctr' and 'bctr'.
  static bool is_jump_at(address instr) {
    return MacroAssembler::is_b64_patchable_at(instr)
      || (   MacroAssembler::is_load_const_from_method_toc_at(instr)
          && Assembler::is_mtctr(*(int*)(instr + 2 * 4))
          && Assembler::is_bctr(*(int*)(instr + 3 * 4)));
  }

  // Does the NativeJump implementation use a pc-relative encoding
  // of the call destination?
  // Used when relocating code or patching jumps.
  bool is_pcrelative() {
    return MacroAssembler::is_b64_patchable_pcrelative_at((address)this);
  }

  // Returns the NativeJump's destination.
  // For the TOC-based encoding the destination is the loaded constant,
  // read via the NativeMovConstReg view of the same instructions.
  address jump_destination() const {
    if (MacroAssembler::is_b64_patchable_at((address)this)) {
      return MacroAssembler::get_dest_of_b64_patchable_at((address)this);
    } else if (MacroAssembler::is_load_const_from_method_toc_at((address)this)
               && Assembler::is_mtctr(*(int*)((address)this + 2 * 4))
               && Assembler::is_bctr(*(int*)((address)this + 3 * 4))) {
      return (address)((NativeMovConstReg *)this)->data();
    } else {
      ShouldNotReachHere();
      return NULL;
    }
  }

  // Sets the NativeJump's destination, not necessarily mt-safe.
  // Used when relocating code or patching jumps.
  void set_jump_destination(address dest) {
    // Set new destination (implementation of call may change here).
    if (MacroAssembler::is_b64_patchable_at((address)this)) {
      MacroAssembler::set_dest_of_b64_patchable_at((address)this, dest);
    } else if (MacroAssembler::is_load_const_from_method_toc_at((address)this)
               && Assembler::is_mtctr(*(int*)((address)this + 2 * 4))
               && Assembler::is_bctr(*(int*)((address)this + 3 * 4))) {
      ((NativeMovConstReg *)this)->set_data((intptr_t)dest);
    } else {
      ShouldNotReachHere();
    }
  }

  // MT-safe insertion of native jump at verified method entry
  static void patch_verified_entry(address entry, address verified_entry, address dest);

  void verify() NOT_DEBUG_RETURN;

  static void check_verified_entry_alignment(address entry, address verified_entry) {
    // We just patch one instruction on ppc64, so the jump doesn't have to
    // be aligned. Nothing to do here.
  }
};
337 
338 // Instantiates a NativeJump object starting at the given instruction
339 // address and returns the NativeJump object.
340 inline NativeJump* nativeJump_at(address instr) {
341   NativeJump* call = (NativeJump*)instr;
342   call->verify();
343   return call;
344 }
345 
346 // NativeConditionalFarBranch is abstraction for accessing/manipulating
347 // conditional far branches.
class NativeConditionalFarBranch : public NativeInstruction {
 public:

  // Checks whether instr points at a conditional far branch sequence
  // (see MacroAssembler::is_bc_far_at).
  static bool is_conditional_far_branch_at(address instr) {
    return MacroAssembler::is_bc_far_at(instr);
  }

  // Returns the destination of this conditional far branch.
  address branch_destination() const {
    return MacroAssembler::get_dest_of_bc_far_at((address)this);
  }

  // Sets the destination of this conditional far branch.
  void set_branch_destination(address dest) {
    MacroAssembler::set_dest_of_bc_far_at((address)this, dest);
  }
};
363 
364 inline NativeConditionalFarBranch* NativeConditionalFarBranch_at(address address) {
365   assert(NativeConditionalFarBranch::is_conditional_far_branch_at(address),
366          "must be a conditional far branch");
367   return (NativeConditionalFarBranch*)address;
368 }
369 
370 // Call trampoline stubs.
class NativeCallTrampolineStub : public NativeInstruction {
 private:

  // Location where the destination is encoded within the stub
  // (defined in nativeInst_ppc.cpp).
  address encoded_destination_addr() const;

 public:

  // Destination the trampoline transfers control to.
  address destination(nmethod *nm = NULL) const;
  // TOC offset of the slot holding the destination.
  int destination_toc_offset() const;

  // Patches the trampoline's destination.
  void set_destination(address new_destination);
};
383 
384 // Note: Other stubs must not begin with this pattern.
385 inline bool is_NativeCallTrampolineStub_at(address address) {
386   int first_instr = *(int*)address;
387   // calculate_address_from_global_toc and long form of ld_largeoffset_unchecked begin with addis with target R12
388   if (Assembler::is_addis(first_instr) &&
389       (Register)(intptr_t)Assembler::inv_rt_field(first_instr) == R12_scratch2) return true;
390 
391   // short form of ld_largeoffset_unchecked is ld which is followed by mtctr
392   int second_instr = *((int*)address + 1);
393   if (Assembler::is_ld(first_instr) &&
394       (Register)(intptr_t)Assembler::inv_rt_field(first_instr) == R12_scratch2 &&
395       Assembler::is_mtctr(second_instr) &&
396       (Register)(intptr_t)Assembler::inv_rs_field(second_instr) == R12_scratch2) return true;
397 
398   return false;
399 }
400 
401 inline NativeCallTrampolineStub* NativeCallTrampolineStub_at(address address) {
402   assert(is_NativeCallTrampolineStub_at(address), "no call trampoline found");
403   return (NativeCallTrampolineStub*)address;
404 }
405 
406 ///////////////////////////////////////////////////////////////////////////////////////////////////
407 
408 //-------------------------------------
409 //  N a t i v e G e n e r a l J u m p
410 //-------------------------------------
411 
412 // Despite the name, handles only simple branches.
413 class NativeGeneralJump;
414 inline NativeGeneralJump* nativeGeneralJump_at(address address);
415 
416 // Currently only implemented as single unconditional branch.
// Currently only implemented as single unconditional branch.
class NativeGeneralJump: public NativeInstruction {
 public:

  enum PPC64_specific_constants {
    instruction_size = 4
  };

  address instruction_address() const { return addr_at(0); }

  // Creation.
  friend inline NativeGeneralJump* nativeGeneralJump_at(address addr) {
    NativeGeneralJump* jump = (NativeGeneralJump*)(addr);
    DEBUG_ONLY( jump->verify(); )
    return jump;
  }

  // Insertion of native general jump instruction.
  static void insert_unconditional(address code_pos, address entry);

  // Destination is this address plus the sign-extended LI field of the 'b'.
  address jump_destination() const {
    DEBUG_ONLY( verify(); )
    return addr_at(0) + Assembler::inv_li_field(long_at(0));
  }

  // Re-emits the unconditional branch with the new destination.
  void set_jump_destination(address dest) {
    DEBUG_ONLY( verify(); )
    insert_unconditional(addr_at(0), dest);
  }

  // MT-safe replacement of this instruction with code from 'code_buffer'.
  static void replace_mt_safe(address instr_addr, address code_buffer);

  // A NativeGeneralJump must be a plain 'b' instruction.
  void verify() const { guarantee(Assembler::is_b(long_at(0)), "invalid NativeGeneralJump"); }
};
450 
451 // An interface for accessing/manipulating native load int (load_const32).
452 class NativeMovRegMem;
453 inline NativeMovRegMem* nativeMovRegMem_at(address address);
class NativeMovRegMem: public NativeInstruction {
 public:

  enum PPC64_specific_constants {
    instruction_size = 8
  };

  address instruction_address() const { return addr_at(0); }

  int num_bytes_to_end_of_patch() const { return instruction_size; }

  // Reassembles the 32-bit immediate from the 16-bit halves carried by the
  // 'lis' (high half) and 'ori' (low half) instructions. The immediate
  // occupies the low halfword of each instruction word, which sits at byte
  // offset 0/4 on little endian and 2/6 on big endian.
  intptr_t offset() const {
#ifdef VM_LITTLE_ENDIAN
    short *hi_ptr = (short*)(addr_at(0));
    short *lo_ptr = (short*)(addr_at(4));
#else
    short *hi_ptr = (short*)(addr_at(0) + 2);
    short *lo_ptr = (short*)(addr_at(4) + 2);
#endif
    // Mask the low half to avoid sign extension leaking into the high bits.
    return ((*hi_ptr) << 16) | ((*lo_ptr) & 0xFFFF);
  }

  // Patches the two immediate fields in place and flushes the icache so the
  // modified instructions become visible to execution.
  void set_offset(intptr_t x) {
#ifdef VM_LITTLE_ENDIAN
    short *hi_ptr = (short*)(addr_at(0));
    short *lo_ptr = (short*)(addr_at(4));
#else
    short *hi_ptr = (short*)(addr_at(0) + 2);
    short *lo_ptr = (short*)(addr_at(4) + 2);
#endif
    *hi_ptr = x >> 16;
    *lo_ptr = x & 0xFFFF;
    ICache::ppc64_flush_icache_bytes(addr_at(0), NativeMovRegMem::instruction_size);
  }

  // Adds 'radd_offset' to the currently encoded immediate.
  void add_offset_in_bytes(intptr_t radd_offset) {
    set_offset(offset() + radd_offset);
  }

  // The sequence must be 'lis' followed by 'ori' (load_const32).
  void verify() const {
    guarantee(Assembler::is_lis(long_at(0)), "load_const32 1st instr");
    guarantee(Assembler::is_ori(long_at(4)), "load_const32 2nd instr");
  }

 private:
  friend inline NativeMovRegMem* nativeMovRegMem_at(address address) {
    NativeMovRegMem* test = (NativeMovRegMem*)address;
    DEBUG_ONLY( test->verify(); )
    return test;
  }
};
505 
// Post-call nop support; not implemented on PPC64 (all members trap).
class NativePostCallNop: public NativeInstruction {
public:
  bool check() const { Unimplemented(); return false; }
  int displacement() const { Unimplemented(); return 0; }
  void patch(jint diff) { Unimplemented(); }
  void make_deopt() { Unimplemented(); }
};
513 
// Not implemented on PPC64 (see NativePostCallNop above).
inline NativePostCallNop* nativePostCallNop_at(address address) {
  Unimplemented();
  return NULL;
}
518 
// Deoptimization instruction support; not implemented on PPC64
// (all members trap via Unimplemented()).
class NativeDeoptInstruction: public NativeInstruction {
public:
  address instruction_address() const       { Unimplemented(); return NULL; }
  address next_instruction_address() const  { Unimplemented(); return NULL; }

  void  verify() { Unimplemented(); }

  static bool is_deopt_at(address instr) {
    Unimplemented();
    return false;
  }

  // MT-safe patching
  static void insert(address code_pos) {
    Unimplemented();
  }
};
536 
537 #endif // CPU_PPC_NATIVEINST_PPC_HPP