1 /*
  2  * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #ifndef CPU_AARCH64_NATIVEINST_AARCH64_HPP
 27 #define CPU_AARCH64_NATIVEINST_AARCH64_HPP
 28 
 29 #include "asm/assembler.hpp"
 30 #include "runtime/icache.hpp"
 31 #include "runtime/os.hpp"
 32 
 33 // We have interfaces for the following instructions:
 34 // - NativeInstruction
 35 // - - NativeCall
 36 // - - NativeMovConstReg
 37 // - - NativeMovConstRegPatching
 38 // - - NativeMovRegMem
 39 // - - NativeMovRegMemPatching
 40 // - - NativeJump
 41 // - - NativeIllegalOpCode
 42 // - - NativeGeneralJump
 43 // - - NativeReturn
 44 // - - NativeReturnX (return with argument)
 45 // - - NativePushConst
 46 // - - NativeTstRegMem
 47 
 48 // The base class for different kinds of native instruction abstractions.
 49 // Provides the primitive operations to manipulate code relative to this.
 50 
 51 class NativeCall;
 52 
class NativeInstruction {
  friend class Relocation;
  friend bool is_NativeCallTrampolineStub_at(address);
public:
  enum {
    instruction_size = 4  // every AArch64 instruction is 32 bits wide
  };

  // Raw 32-bit encoding of the instruction at this address.
  juint encoding() const {
    return uint_at(0);
  }

  bool is_blr() const {
    // blr(register) or br(register)
    return (encoding() & 0xff9ffc1f) == 0xd61f0000;
  }
  bool is_adr_aligned() const {
    // adr Xn, <label>, where label is aligned to 4 bytes (address of instruction).
    // The mask also covers immlo (bits 30:29), so only immlo == 0, i.e. a
    // 4-byte-aligned target, matches.
    return (encoding() & 0xff000000) == 0x10000000;
  }

  // Classification predicates; the non-inline ones are defined out of line.
  inline bool is_nop() const;
  bool is_jump();
  bool is_general_jump();
  inline bool is_jump_or_nop();
  inline bool is_cond_jump();
  bool is_safepoint_poll();
  bool is_movz();
  bool is_movk();
  bool is_sigill_zombie_not_entrant();
  bool is_stop();

protected:
  // Address of this instruction plus a byte offset.
  address addr_at(int offset) const { return address(this) + offset; }

  // Raw typed reads relative to the instruction start.
  s_char sbyte_at(int offset) const { return *(s_char*)addr_at(offset); }
  u_char ubyte_at(int offset) const { return *(u_char*)addr_at(offset); }
  jint int_at(int offset) const { return *(jint*)addr_at(offset); }
  juint uint_at(int offset) const { return *(juint*)addr_at(offset); }
  address ptr_at(int offset) const { return *(address*)addr_at(offset); }
  oop oop_at(int offset) const { return *(oop*)addr_at(offset); }

  // Raw typed writes relative to the instruction start.  Callers are
  // responsible for any needed cache maintenance (see wrote()).
  void set_char_at(int offset, char c) { *addr_at(offset) = (u_char)c; }
  void set_int_at(int offset, jint i) { *(jint*)addr_at(offset) = i; }
  void set_uint_at(int offset, jint i) { *(juint*)addr_at(offset) = i; }
  void set_ptr_at(int offset, address ptr) { *(address*)addr_at(offset) = ptr; }
  void set_oop_at(int offset, oop o) { *(oop*)addr_at(offset) = o; }

  // Hook invoked after the bytes at addr_at(offset) have been modified
  // (defined out of line).
  void wrote(int offset);

public:

  inline friend NativeInstruction* nativeInstruction_at(address address);

  static bool is_adrp_at(address instr);

  static bool is_ldr_literal_at(address instr);

  bool is_ldr_literal() {
    return is_ldr_literal_at(addr_at(0));
  }

  static bool is_ldrw_to_zr(address instr);

  static bool is_call_at(address instr) {
    const uint32_t insn = (*(uint32_t*)instr);
    return (insn >> 26) == 0b100101;  // bl <imm26>
  }

  bool is_call() {
    return is_call_at(addr_at(0));
  }

  // True if this instruction may be a constant-pool reference
  // (adrp or a pc-relative literal load).
  static bool maybe_cpool_ref(address instr) {
    return is_adrp_at(instr) || is_ldr_literal_at(instr);
  }

  // Matches the DMB barrier encoding; the barrier-kind field (bits 11:8)
  // is left free -- see NativeMembar::get_kind().
  bool is_Membar() {
    unsigned int insn = uint_at(0);
    return Instruction_aarch64::extract(insn, 31, 12) == 0b11010101000000110011 &&
      Instruction_aarch64::extract(insn, 7, 0) == 0b10111111;
  }

  // Matches a load/store with an immediate offset (see NativeLdSt).
  bool is_Imm_LdSt() {
    unsigned int insn = uint_at(0);
    return Instruction_aarch64::extract(insn, 29, 27) == 0b111 &&
      Instruction_aarch64::extract(insn, 23, 23) == 0b0 &&
      Instruction_aarch64::extract(insn, 26, 25) == 0b00;
  }
};
143 
144 inline NativeInstruction* nativeInstruction_at(address address) {
145   return (NativeInstruction*)address;
146 }
147 
148 // The natural type of an AArch64 instruction is uint32_t
149 inline NativeInstruction* nativeInstruction_at(uint32_t* address) {
150   return (NativeInstruction*)address;
151 }
152 
// A call that dispatches through a PLT entry; the plt_* accessors
// (defined out of line) decode the pieces of the entry.
class NativePltCall: public NativeInstruction {
public:
  enum Arm_specific_constants {
    instruction_size           =    4,
    instruction_offset         =    0,
    displacement_offset        =    1,  // NOTE(review): byte offset 1 within a 4-byte instruction looks unusual for AArch64 -- confirm
    return_address_offset      =    4
  };
  address instruction_address() const { return addr_at(instruction_offset); }
  address next_instruction_address() const { return addr_at(return_address_offset); }
  address displacement_address() const { return addr_at(displacement_offset); }
  int displacement() const { return (jint) int_at(displacement_offset); }
  address return_address() const { return addr_at(return_address_offset); }
  address destination() const;
  address plt_entry() const;
  address plt_jump() const;
  address plt_load_got() const;
  address plt_resolve_call() const;
  address plt_c2i_stub() const;
  void set_stub_to_clean();

  void reset_to_plt_resolve_call();
  // Change the call target in a way safe for concurrently executing threads.
  void set_destination_mt_safe(address dest);

  void verify() const;
};
179 
180 inline NativePltCall* nativePltCall_at(address address) {
181   NativePltCall* call = (NativePltCall*)address;
182   DEBUG_ONLY(call->verify());
183   return call;
184 }
185 
186 inline NativePltCall* nativePltCall_before(address addr) {
187   address at = addr - NativePltCall::instruction_size;
188   return nativePltCall_at(at);
189 }
190 
191 inline NativeCall* nativeCall_at(address address);
192 // The NativeCall is an abstraction for accessing/manipulating native
193 // call instructions (used to manipulate inline caches, primitive &
194 // DSO calls, etc.).
195 
class NativeCall: public NativeInstruction {
public:
  enum Aarch64_specific_constants {
    instruction_size            =    4,
    instruction_offset          =    0,
    displacement_offset         =    0,
    return_address_offset       =    4
  };

  address instruction_address() const { return addr_at(instruction_offset); }
  address next_instruction_address() const { return addr_at(return_address_offset); }
  // Signed byte displacement of the bl: "<< 6" moves the imm26 sign bit to
  // bit 31, then the arithmetic ">> 4" sign-extends and leaves the value
  // scaled by 4 (imm26 counts words).
  int displacement() const { return (int_at(displacement_offset) << 6) >> 4; }
  address displacement_address() const { return addr_at(displacement_offset); }
  address return_address() const { return addr_at(return_address_offset); }
  address destination() const;

  // Re-encode this instruction as "bl dest".  Assumes dest is within bl's
  // reach: offset bits beyond imm26 are silently masked off.
  void set_destination(address dest) {
    int offset = dest - instruction_address();
    unsigned int insn = 0b100101 << 26;  // bl opcode in bits 31:26
    assert((offset & 3) == 0, "should be");
    offset >>= 2;
    offset &= (1 << 26) - 1; // mask off insn part
    insn |= offset;
    set_int_at(displacement_offset, insn);
  }

  void verify_alignment() { ; }  // a bl has no extra alignment constraint
  void verify();

  // Creation
  inline friend NativeCall* nativeCall_at(address address);
  inline friend NativeCall* nativeCall_before(address return_address);

  static bool is_call_before(address return_address) {
    return is_call_at(return_address - NativeCall::return_address_offset);
  }

  // MT-safe patching of a call instruction.
  static void insert(address code_pos, address entry);

  static void replace_mt_safe(address instr_addr, address code_buffer);

  // Similar to replace_mt_safe, but just changes the destination.  The
  // important thing is that free-running threads are able to execute
  // this call instruction at all times.  If the call is an immediate BL
  // instruction we can simply rely on atomicity of 32-bit writes to
  // make sure other threads will see no intermediate states.

  // We cannot rely on locks here, since the free-running threads must run at
  // full speed.
  //
  // Used in the runtime linkage of calls; see class CompiledIC.
  // (Cf. 4506997 and 4479829, where threads witnessed garbage displacements.)

  // The parameter assert_lock disables the assertion during code generation.
  void set_destination_mt_safe(address dest, bool assert_lock = true);

  address get_trampoline();
  address trampoline_jump(CodeBuffer &cbuf, address dest);
};
256 
257 inline NativeCall* nativeCall_at(address address) {
258   NativeCall* call = (NativeCall*)(address - NativeCall::instruction_offset);
259   DEBUG_ONLY(call->verify());
260   return call;
261 }
262 
263 inline NativeCall* nativeCall_before(address return_address) {
264   NativeCall* call = (NativeCall*)(return_address - NativeCall::return_address_offset);
265   DEBUG_ONLY(call->verify());
266   return call;
267 }
268 
269 // An interface for accessing/manipulating native mov reg, imm instructions.
270 // (used to manipulate inlined 64-bit data calls, etc.)
class NativeMovConstReg: public NativeInstruction {
public:
  enum Aarch64_specific_constants {
    instruction_size            =    3 * 4, // movz, movk, movk.  See movptr().
    instruction_offset          =    0,
    displacement_offset         =    0,
  };

  address instruction_address() const { return addr_at(instruction_offset); }

  // End of the constant-load sequence.  Its length depends on which form
  // was emitted: movz/movk/movk (3 words), adrp+add (2 words), or a single
  // pc-relative literal load (1 word).
  address next_instruction_address() const {
    if (nativeInstruction_at(instruction_address())->is_movz())
      // Assume movz, movk, movk
      return addr_at(instruction_size);
    else if (is_adrp_at(instruction_address()))
      return addr_at(2*4);
    else if (is_ldr_literal_at(instruction_address()))
      return(addr_at(4));
    assert(false, "Unknown instruction in NativeMovConstReg");
    return NULL;
  }

  // The constant being materialized (defined out of line).
  intptr_t data() const;
  void set_data(intptr_t x);

  // Invalidate the ICache for this sequence unless it is a constant-pool
  // reference (adrp / literal load), where the code itself is unchanged.
  void flush() {
    if (! maybe_cpool_ref(instruction_address())) {
      ICache::invalidate_range(instruction_address(), instruction_size);
    }
  }

  void verify();
  void print();

  // Creation
  inline friend NativeMovConstReg* nativeMovConstReg_at(address address);
  inline friend NativeMovConstReg* nativeMovConstReg_before(address address);
};
309 
310 inline NativeMovConstReg* nativeMovConstReg_at(address address) {
311   NativeMovConstReg* test = (NativeMovConstReg*)(address - NativeMovConstReg::instruction_offset);
312   DEBUG_ONLY(test->verify());
313   return test;
314 }
315 
316 inline NativeMovConstReg* nativeMovConstReg_before(address address) {
317   NativeMovConstReg* test = (NativeMovConstReg*)(address - NativeMovConstReg::instruction_size - NativeMovConstReg::instruction_offset);
318   DEBUG_ONLY(test->verify());
319   return test;
320 }
321 
// Patching variant of NativeMovConstReg; on AArch64 it behaves exactly
// like its base class.
class NativeMovConstRegPatching: public NativeMovConstReg {
private:
  friend NativeMovConstRegPatching* nativeMovConstRegPatching_at(address address) {
    NativeMovConstRegPatching* test = (NativeMovConstRegPatching*)(address - instruction_offset);
    DEBUG_ONLY(test->verify());
    return test;
  }
};
330 
331 // An interface for accessing/manipulating native moves of the form:
332 //      mov[b/w/l/q] [reg + offset], reg   (instruction_code_reg2mem)
333 //      mov[b/w/l/q] reg, [reg+offset]     (instruction_code_mem2reg
334 //      mov[s/z]x[w/b/q] [reg + offset], reg
335 //      fld_s  [reg+offset]
336 //      fld_d  [reg+offset]
337 //      fstp_s [reg + offset]
338 //      fstp_d [reg + offset]
339 //      mov_literal64  scratch,<pointer> ; mov[b/w/l/q] 0(scratch),reg | mov[b/w/l/q] reg,0(scratch)
340 //
341 // Warning: These routines must be able to handle any instruction sequences
342 // that are generated as a result of the load/store byte,word,long
343 // macros.  For example: The load_unsigned_byte instruction generates
344 // an xor reg,reg inst prior to generating the movb instruction.  This
345 // class must skip the xor instruction.
346 
class NativeMovRegMem: public NativeInstruction {
  enum AArch64_specific_constants {
    instruction_size            =    4,
    instruction_offset          =    0,
    data_offset                 =    0,
    next_instruction_offset     =    4
  };

public:
  // helper
  int instruction_start() const { return instruction_offset; }

  address instruction_address() const { return addr_at(instruction_offset); }

  // Number of bytes from the start of this object to the end of the
  // patchable region.
  int num_bytes_to_end_of_patch() const { return instruction_offset + instruction_size; }

  // Memory-operand offset encoded in the instruction (defined out of line).
  int offset() const;

  void set_offset(int x);

  // Adjust the encoded offset by 'add_offset' bytes.
  void add_offset_in_bytes(int add_offset) {
    set_offset(offset() + add_offset);
  }

  void verify();

private:
  inline friend NativeMovRegMem* nativeMovRegMem_at(address address);
};
376 
377 inline NativeMovRegMem* nativeMovRegMem_at(address address) {
378   NativeMovRegMem* test = (NativeMovRegMem*)(address - NativeMovRegMem::instruction_offset);
379   DEBUG_ONLY(test->verify());
380   return test;
381 }
382 
383 class NativeMovRegMemPatching: public NativeMovRegMem {
384 private:
385   friend NativeMovRegMemPatching* nativeMovRegMemPatching_at(address address) {
386     Unimplemented();
387     return 0;
388   }
389 };
390 
391 // An interface for accessing/manipulating native leal instruction of form:
392 //        leal reg, [reg + offset]
393 
class NativeLoadAddress: public NativeInstruction {
  enum AArch64_specific_constants {
    instruction_size            =    4,
    instruction_offset          =    0,
    data_offset                 =    0,
    next_instruction_offset     =    4
  };

public:
  // Only verification is provided; the constants above describe the layout.
  void verify();
};
405 
406 //   adrp    x16, #page
407 //   add     x16, x16, #offset
408 //   ldr     x16, [x16]
// Accessor for the adrp/add/ldr GOT-load sequence shown above.
class NativeLoadGot: public NativeInstruction {
public:
  enum AArch64_specific_constants {
    instruction_length = 4 * NativeInstruction::instruction_size,
    offset_offset = 0,
  };

  address instruction_address() const { return addr_at(0); }
  address return_address() const { return addr_at(instruction_length); }
  // Address of the GOT slot this sequence reads (defined out of line).
  address got_address() const;
  address next_instruction_address() const { return return_address(); }
  // Current contents of the GOT slot (defined out of line).
  intptr_t data() const;
  // Store straight into the GOT slot; the instructions themselves are
  // not modified, so no ICache maintenance is needed here.
  void set_data(intptr_t data) {
    intptr_t* addr = (intptr_t*)got_address();
    *addr = data;
  }

  void verify() const;
private:
  void report_and_fail() const;
};
430 
431 inline NativeLoadGot* nativeLoadGot_at(address addr) {
432   NativeLoadGot* load = (NativeLoadGot*)addr;
433   DEBUG_ONLY(load->verify());
434   return load;
435 }
436 
// A single-word unconditional jump.
class NativeJump: public NativeInstruction {
public:
  enum AArch64_specific_constants {
    instruction_size            =    4,
    instruction_offset          =    0,
    data_offset                 =    0,
    next_instruction_offset     =    4
  };

  address instruction_address() const { return addr_at(instruction_offset); }
  address next_instruction_address() const { return addr_at(instruction_size); }
  // Branch target (defined out of line).
  address jump_destination() const;
  void set_jump_destination(address dest);

  // Creation
  inline friend NativeJump* nativeJump_at(address address);

  void verify();

  // Insertion of native jump instruction
  static void insert(address code_pos, address entry);
  // MT-safe insertion of native jump at verified method entry
  static void check_verified_entry_alignment(address entry, address verified_entry);
  static void patch_verified_entry(address entry, address verified_entry, address dest);
};
462 
463 inline NativeJump* nativeJump_at(address address) {
464   NativeJump* jump = (NativeJump*)(address - NativeJump::instruction_offset);
465   DEBUG_ONLY(jump->verify());
466   return jump;
467 }
468 
// A longer jump built from a 4-word sequence (see instruction_size),
// as opposed to the single-word NativeJump.
class NativeGeneralJump: public NativeJump {
public:
  enum AArch64_specific_constants {
    instruction_size            =    4 * 4,
    instruction_offset          =    0,
    data_offset                 =    0,
    next_instruction_offset     =    4 * 4
  };

  address jump_destination() const;
  void set_jump_destination(address dest);

  static void insert_unconditional(address code_pos, address entry);
  static void replace_mt_safe(address instr_addr, address code_buffer);
  // NOTE: static, so it hides (does not override) NativeJump::verify().
  static void verify();
};
485 
486 inline NativeGeneralJump* nativeGeneralJump_at(address address) {
487   NativeGeneralJump* jump = (NativeGeneralJump*)(address);
488   DEBUG_ONLY(jump->verify());
489   return jump;
490 }
491 
// A jump that loads its target from a GOT slot (4-word sequence).
class NativeGotJump: public NativeInstruction {
public:
  enum AArch64_specific_constants {
    instruction_size = 4 * NativeInstruction::instruction_size,
  };

  void verify() const;
  address instruction_address() const { return addr_at(0); }
  // Current jump target, read via the GOT (defined out of line).
  address destination() const;
  address return_address() const { return addr_at(instruction_size); }
  // Address of the GOT slot holding the target (defined out of line).
  address got_address() const;
  address next_instruction_address() const { return addr_at(instruction_size); }
  bool is_GotJump() const;

  // Redirect the jump by rewriting the GOT slot; the instructions
  // themselves are not modified.
  void set_jump_destination(address dest) {
    address* got = (address*)got_address();
    *got = dest;
  }
};
511 
512 inline NativeGotJump* nativeGotJump_at(address addr) {
513   NativeGotJump* jump = (NativeGotJump*)(addr);
514   DEBUG_ONLY(jump->verify());
515   return jump;
516 }
517 
class NativePopReg : public NativeInstruction {
public:
  // Insert a pop instruction
  static void insert(address code_pos, Register reg);
};
523 
524 
class NativeIllegalInstruction: public NativeInstruction {
public:
  // Insert illegal opcode at specific address (defined out of line).
  static void insert(address code_pos);
};
530 
531 // return instruction that does not pop values of the stack
class NativeReturn: public NativeInstruction {
public:
  // Marker class only; no AArch64-specific members are needed.
};
535 
536 // return instruction that does pop values of the stack
class NativeReturnX: public NativeInstruction {
public:
  // Marker class only; no AArch64-specific members are needed.
};
540 
541 // Simple test vs memory
class NativeTstRegMem: public NativeInstruction {
public:
  // Marker class only; no AArch64-specific members are needed.
};
545 
546 inline bool NativeInstruction::is_nop() const{
547   uint32_t insn = *(uint32_t*)addr_at(0);
548   return insn == 0xd503201f;
549 }
550 
551 inline bool NativeInstruction::is_jump() {
552   uint32_t insn = *(uint32_t*)addr_at(0);
553 
554   if (Instruction_aarch64::extract(insn, 30, 26) == 0b00101) {
555     // Unconditional branch (immediate)
556     return true;
557   } else if (Instruction_aarch64::extract(insn, 31, 25) == 0b0101010) {
558     // Conditional branch (immediate)
559     return true;
560   } else if (Instruction_aarch64::extract(insn, 30, 25) == 0b011010) {
561     // Compare & branch (immediate)
562     return true;
563   } else if (Instruction_aarch64::extract(insn, 30, 25) == 0b011011) {
564     // Test & branch (immediate)
565     return true;
566   } else
567     return false;
568 }
569 
570 inline bool NativeInstruction::is_jump_or_nop() {
571   return is_nop() || is_jump();
572 }
573 
574 // Call trampoline stubs.
// Layout (see is_NativeCallTrampolineStub_at below):
//      ldr   xscratch1, L     ; load 64-bit destination
//      br    xscratch1
//   L: <destination word>     ; at data_offset
class NativeCallTrampolineStub : public NativeInstruction {
public:

  enum AArch64_specific_constants {
    instruction_size            =    4 * 4,
    instruction_offset          =    0,
    data_offset                 =    2 * 4,
    next_instruction_offset     =    4 * 4
  };

  // Accessors for the destination word; defined out of line.
  address destination(nmethod* nm = NULL) const;
  void set_destination(address new_destination);
  ptrdiff_t destination_offset() const;
};
589 
590 inline bool is_NativeCallTrampolineStub_at(address addr) {
591   // Ensure that the stub is exactly
592   //      ldr   xscratch1, L
593   //      br    xscratch1
594   // L:
595   uint32_t* i = (uint32_t*)addr;
596   return i[0] == 0x58000048 && i[1] == 0xd61f0100;
597 }
598 
599 inline NativeCallTrampolineStub* nativeCallTrampolineStub_at(address addr) {
600   assert(is_NativeCallTrampolineStub_at(addr), "no call trampoline found");
601   return (NativeCallTrampolineStub*)addr;
602 }
603 
// A memory-barrier instruction (see NativeInstruction::is_Membar()).
class NativeMembar : public NativeInstruction {
public:
  // Barrier-kind field: bits 11:8 of the encoding.
  unsigned int get_kind() { return Instruction_aarch64::extract(uint_at(0), 11, 8); }
  void set_kind(int order_kind) { Instruction_aarch64::patch(addr_at(0), 11, 8, order_kind); }
};
609 
610 inline NativeMembar* NativeMembar_at(address addr) {
611   assert(nativeInstruction_at(addr)->is_Membar(), "no membar found");
612   return (NativeMembar*)addr;
613 }
614 
// Decoder for a scalar load/store with an immediate offset, in either
// the unscaled (imm9) or the scaled unsigned-offset (imm12) form.
// Pre-/post-indexed forms are deliberately not supported (see offset()).
class NativeLdSt : public NativeInstruction {
private:
  // Size field, bits 31:30: log2 of the transfer size in bytes.
  int32_t size() { return Instruction_aarch64::extract(uint_at(0), 31, 30); }
  // Check whether instruction is with unscaled offset.
  bool is_ldst_ur() {
    return (Instruction_aarch64::extract(uint_at(0), 29, 21) == 0b111000010 ||
            Instruction_aarch64::extract(uint_at(0), 29, 21) == 0b111000000) &&
      Instruction_aarch64::extract(uint_at(0), 11, 10) == 0b00;
  }
  // Check whether instruction uses the scaled unsigned-offset form.
  bool is_ldst_unsigned_offset() {
    return Instruction_aarch64::extract(uint_at(0), 29, 22) == 0b11100101 ||
      Instruction_aarch64::extract(uint_at(0), 29, 22) == 0b11100100;
  }
public:
  // Data register, bits 4:0; encoding 31 decodes as zr here.
  Register target() {
    uint32_t r = Instruction_aarch64::extract(uint_at(0), 4, 0);
    return r == 0x1f ? zr : as_Register(r);
  }
  // Base address register, bits 9:5; encoding 31 decodes as sp here.
  Register base() {
    uint32_t b = Instruction_aarch64::extract(uint_at(0), 9, 5);
    return b == 0x1f ? sp : as_Register(b);
  }
  // Byte offset: signed imm9 (unscaled form) or unsigned imm12 scaled by
  // the transfer size (unsigned-offset form).
  int64_t offset() {
    if (is_ldst_ur()) {
      return Instruction_aarch64::sextract(uint_at(0), 20, 12);
    } else if (is_ldst_unsigned_offset()) {
      return Instruction_aarch64::extract(uint_at(0), 21, 10) << size();
    } else {
      // others like: pre-index or post-index.
      ShouldNotReachHere();
      return 0;
    }
  }
  // Transfer size in bytes (1, 2, 4 or 8).
  size_t size_in_bytes() { return 1ULL << size(); }
  bool is_not_pre_post_index() { return (is_ldst_ur() || is_ldst_unsigned_offset()); }
  // Load vs. store is distinguished by bits 23:22 (01 = load, 00 = store).
  bool is_load() {
    assert(Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b01 ||
           Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b00, "must be ldr or str");

    return Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b01;
  }
  bool is_store() {
    assert(Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b01 ||
           Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b00, "must be ldr or str");

    return Instruction_aarch64::extract(uint_at(0), 23, 22) == 0b00;
  }
};
663 
664 inline NativeLdSt* NativeLdSt_at(address addr) {
665   assert(nativeInstruction_at(addr)->is_Imm_LdSt(), "no immediate load/store found");
666   return (NativeLdSt*)addr;
667 }
668 
// A nop placed after a call; recognized purely by being a nop.
// patch() and make_deopt() are defined out of line.
class NativePostCallNop: public NativeInstruction {
public:
  bool check() const { return is_nop(); }
  int displacement() const { return 0; }
  void patch(jint diff);
  void make_deopt();
};
676 
677 inline NativePostCallNop* nativePostCallNop_at(address address) {
678   NativePostCallNop* nop = (NativePostCallNop*) address;
679   if (nop->check()) {
680     return nop;
681   }
682   return NULL;
683 }
684 
685 inline NativePostCallNop* nativePostCallNop_unsafe_at(address address) {
686   NativePostCallNop* nop = (NativePostCallNop*) address;
687   assert (nop->check(), "");
688   return nop;
689 }
690 
// A single-word instruction marking a deoptimization point.
class NativeDeoptInstruction: public NativeInstruction {
 public:
  enum {
    instruction_size            =    4,
    instruction_offset          =    0,
  };

  address instruction_address() const       { return addr_at(instruction_offset); }
  address next_instruction_address() const  { return addr_at(instruction_size); }

  void  verify();

  // True iff the word at 'instr' is the fixed deopt marker encoding.
  static bool is_deopt_at(address instr) {
    assert (instr != NULL, "");
    uint32_t value = *(uint32_t *) instr;
    return value == 0xd4ade001;
  }

  // MT-safe patching
  static void insert(address code_pos);
};
712 
713 #endif // CPU_AARCH64_NATIVEINST_AARCH64_HPP
--- EOF ---