src/hotspot/cpu/x86/gc/shared/barrierSetNMethod_x86.cpp

@@ -33,26 +33,36 @@
 #include "utilities/align.hpp"
 #include "utilities/debug.hpp"
 
 class NativeNMethodCmpBarrier: public NativeInstruction {
 public:
+#ifdef _LP64
   enum Intel_specific_constants {
     instruction_code        = 0x81,
     instruction_size        = 8,
     imm_offset              = 4,
     instruction_rex_prefix  = Assembler::REX | Assembler::REX_B,
     instruction_modrm       = 0x7f  // [r15 + offset]
   };
+#else
+  enum Intel_specific_constants {
+    instruction_code        = 0x81,
+    instruction_size        = 7,
+    imm_offset              = 2,
+    instruction_modrm       = 0x3f  // [rdi]
+  };
+#endif
 
   address instruction_address() const { return addr_at(0); }
   address immediate_address() const { return addr_at(imm_offset); }
 
   jint get_immedate() const { return int_at(imm_offset); }
   void set_immediate(jint imm) { set_int_at(imm_offset, imm); }
   void verify() const;
 };
 
+#ifdef _LP64
 void NativeNMethodCmpBarrier::verify() const {
   if (((uintptr_t) instruction_address()) & 0x7) {
     fatal("Not properly aligned");
   }
 

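For reference, a minimal standalone sketch of the byte layout that the two Intel_specific_constants variants and verify() describe. The encodings are plain x86 "cmp r/m32, imm32" (opcode 0x81, /7 in ModRM.reg); the disp8 and immediate values below are placeholders rather than values taken from HotSpot, and the snippet is an illustration, not part of this change.

#include <cassert>
#include <cstdint>
#include <cstring>

// Read the 32-bit immediate out of a cmp-with-imm32 instruction, addressed via
// imm_offset the same way get_immedate()/set_immediate() do (x86 is little-endian).
static int32_t imm_at(const uint8_t* insn, int imm_offset) {
  int32_t imm;
  std::memcpy(&imm, insn + imm_offset, sizeof(imm));
  return imm;
}

int main() {
  // LP64: REX.B (0x41), opcode 0x81, ModRM 0x7f (mod=01 -> disp8, reg=/7 -> cmp,
  // rm=111 -> r15 with REX.B), disp8, imm32: 8 bytes total, immediate at offset 4.
  const uint8_t cmp64[8] = {0x41, 0x81, 0x7f, 0x20, 0x2a, 0x00, 0x00, 0x00};
  assert(imm_at(cmp64, 4) == 42);

  // 32-bit: opcode 0x81, ModRM 0x3f (mod=00, reg=/7 -> cmp, rm=111 -> [edi]),
  // imm32: immediate at offset 2.
  const uint8_t cmp32[6] = {0x81, 0x3f, 0x2a, 0x00, 0x00, 0x00};
  assert(imm_at(cmp32, 2) == 42);
  return 0;
}
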
@@ -75,10 +85,31 @@
     tty->print_cr("Addr: " INTPTR_FORMAT " mod/rm: 0x%x", p2i(instruction_address()),
         modrm);
     fatal("not a cmp barrier");
   }
 }
+#else
+void NativeNMethodCmpBarrier::verify() const {
+  if (((uintptr_t) instruction_address()) & 0x3) {
+    fatal("Not properly aligned");
+  }
+
+  int inst = ubyte_at(0);
+  if (inst != instruction_code) {
+    tty->print_cr("Addr: " INTPTR_FORMAT " Code: 0x%x", p2i(instruction_address()),
+        inst);
+    fatal("not a cmp barrier");
+  }
+
+  int modrm = ubyte_at(1);
+  if (modrm != instruction_modrm) {
+    tty->print_cr("Addr: " INTPTR_FORMAT " mod/rm: 0x%x", p2i(instruction_address()),
+        modrm);
+    fatal("not a cmp barrier");
+  }
+}
+#endif // _LP64
 
 void BarrierSetNMethod::deoptimize(nmethod* nm, address* return_address_ptr) {
   /*
    * [ callers frame          ]
    * [ callers return address ] <- callers rsp

@@ -125,11 +156,11 @@
 // If any code changes between the end of the verified entry where the entry
 // barrier resides, and the completion of the frame, then
 // NativeNMethodCmpBarrier::verify() will immediately complain when it does
 // not find the expected native instruction at this offset, which needs updating.
 // Note that this offset is invariant of PreserveFramePointer.
-static const int entry_barrier_offset = -19;
+static const int entry_barrier_offset = LP64_ONLY(-19) NOT_LP64(-18);
 
 static NativeNMethodCmpBarrier* native_nmethod_barrier(nmethod* nm) {
   address barrier_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset;
   NativeNMethodCmpBarrier* barrier = reinterpret_cast<NativeNMethodCmpBarrier*>(barrier_address);
   debug_only(barrier->verify());
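
For reference, one plausible accounting for the -19/-18 entry_barrier_offset values, assuming the barrier sequence (presumably emitted by BarrierSetAssembler::nmethod_entry_barrier) is cmp + jcc + call, with an extra one-byte pop of a scratch register on 32-bit where the disarmed-value address is first materialized in a register, and that the frame-complete offset is recorded right after the barrier. The emitted sequence is an assumption about code outside this file; only the per-instruction lengths are standard x86.

// cmp dword ptr [r15+disp8], imm32 (8) + jcc equal, rel32 (6) + call rel32 (5)
static_assert(8 + 6 + 5 == 19, "consistent with the LP64 entry_barrier_offset of -19");
// cmp dword ptr [edi], imm32 (6) + pop reg (1) + jcc equal, rel32 (6) + call rel32 (5)
static_assert(6 + 1 + 6 + 5 == 18, "consistent with the 32-bit entry_barrier_offset of -18");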