< prev index next >

src/hotspot/cpu/x86/c1_LIRAssembler_x86.cpp

Print this page

        

@@ -218,10 +218,27 @@
   } else {
     ShouldNotReachHere();
   }
 }
 
+// Materialize a frame-pointer-like value into the low register of opr:
+// rsp + fixed frame size + wordSize. Computed from rsp rather than read
+// from rbp.
+void LIR_Assembler::getfp(LIR_Opr opr) {
+  __ lea(opr->as_register_lo(), Address(rsp, initial_frame_size_in_bytes() + wordSize)); // + wordSize seems to be required to handle the push rbp before the sub of rsp
+}
+
+// Copy the current stack pointer (rsp) into the low register of opr.
+void LIR_Assembler::getsp(LIR_Opr opr) {
+  __ movptr(opr->as_register_lo(), rsp);
+}
+
+#if 0
+// Disabled: fetch the current PC by calling the cont_getPC leaf stub and
+// copy the stub's result (returned in rax) into the low register of opr.
+void LIR_Assembler::getpc(LIR_Opr opr) {
+  // The original draft declared `const char *name + "cont_getPC";` which is
+  // both a typo (`+` for `=`) and an unused variable; the declaration is
+  // removed entirely.
+  address entry = StubRoutines::cont_getPC();
+  __ call_VM_leaf(entry, 0);
+  __ movptr(opr->as_register_lo(), rax);
+}
+#endif
+
 bool LIR_Assembler::is_literal_address(LIR_Address* addr) {
   return addr->base()->is_illegal() && addr->index()->is_illegal();
 }
 
 //-------------------------------------------

@@ -2831,18 +2848,22 @@
 // Emit a direct Java call. The assert checks that the call's displacement
 // field lands word-aligned (required so the displacement can be patched
 // safely). After recording the debug/call info, oop-map metadata and a
 // post-call nop are emitted.
 // NOTE(review): oopmap_metadata/post_call_nop look continuation (Loom)
 // related -- confirm against the MacroAssembler definitions.
 void LIR_Assembler::call(LIR_OpJavaCall* op, relocInfo::relocType rtype) {
   assert((__ offset() + NativeCall::displacement_offset) % BytesPerWord == 0,
          "must be aligned");
   __ call(AddressLiteral(op->addr(), rtype));
   add_call_info(code_offset(), op->info());
+  __ oopmap_metadata(op->info());
+  __ post_call_nop();
 }
 
 
 // Emit an inline-cache dispatched Java call. Call info and oop-map metadata
 // are recorded immediately after the call; the assert then checks (after the
 // fact) that the call's displacement field was word-aligned. A post-call nop
 // is emitted last.
 // NOTE(review): unlike LIR_Assembler::call above, the alignment assert here
 // runs after emission -- it verifies rather than guards the layout.
 void LIR_Assembler::ic_call(LIR_OpJavaCall* op) {
   __ ic_call(op->addr());
   add_call_info(code_offset(), op->info());
+  __ oopmap_metadata(op->info());
   assert((__ offset() - NativeCall::instruction_size + NativeCall::displacement_offset) % BytesPerWord == 0,
          "must be aligned");
+  __ post_call_nop();
 }
 
 
 /* Currently, vtable-dispatch is only enabled for sparc platforms */
 void LIR_Assembler::vtable_call(LIR_OpJavaCall* op) {

@@ -3836,11 +3857,13 @@
 // Emit a call to a runtime routine at `dest`. No temporary register is
 // needed on x86 (hence the assert on tmp). Call info and oop-map metadata
 // are recorded only when debug info is present, but the post-call nop is
 // emitted unconditionally for every runtime call site.
 void LIR_Assembler::rt_call(LIR_Opr result, address dest, const LIR_OprList* args, LIR_Opr tmp, CodeEmitInfo* info) {
   assert(!tmp->is_valid(), "don't need temporary");
   __ call(RuntimeAddress(dest));
   if (info != NULL) {
     add_call_info_here(info);
+    __ oopmap_metadata(info);
   }
+  __ post_call_nop();
 }
 
 
 void LIR_Assembler::volatile_move_op(LIR_Opr src, LIR_Opr dest, BasicType type, CodeEmitInfo* info) {
   assert(type == T_LONG, "only for volatile long fields");
< prev index next >