
src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp

*** 439,10 ***
--- 439,11 ---
    if (method()->is_synchronized()) {
      monitor_address(0, FrameMap::r0_opr);
      stub = new MonitorExitStub(FrameMap::r0_opr, true, 0);
      __ unlock_object(r5, r4, r0, *stub->entry());
      __ bind(*stub->continuation());
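+     // Balance the count for the monitor released on method exit; a nonzero
+     // held monitor count pins a virtual thread to its carrier.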
+     __ dec_held_monitor_count(rthread);
    }
  
    if (compilation()->env()->dtrace_method_probes()) {
      __ mov(c_rarg0, rthread);
      __ mov_metadata(c_rarg1, method()->constant_encoding());

*** 2054,20 ***
--- 2055,22 ---
    if (call == NULL) {
      bailout("trampoline stub overflow");
      return;
    }
    add_call_info(code_offset(), op->info());
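+   // Emit a marker nop after the call; continuation freeze/thaw uses it to
+   // identify the return site.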
+   __ post_call_nop();
  }
  
  
  void LIR_Assembler::ic_call(LIR_OpJavaCall* op) {
    address call = __ ic_call(op->addr());
    if (call == NULL) {
      bailout("trampoline stub overflow");
      return;
    }
    add_call_info(code_offset(), op->info());
+   __ post_call_nop();
  }
  
  void LIR_Assembler::emit_static_call_stub() {
    address call_pc = __ pc();
    address stub = __ start_a_stub(call_stub_size());

*** 2588,11 ***
--- 2591,22 ---
      assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
      __ unlock_object(hdr, obj, lock, *op->stub()->entry());
    } else {
      Unimplemented();
    }
+   if (op->code() == lir_lock) {
+     // If deoptimization happens in Runtime1::monitorenter, an inc_held_monitor_count
+     // placed after returning from the slow path would be skipped. The solution is:
+     // 1. Increment the count here, on the fast path only.
+     // 2. Have Runtime1::monitorenter increment the count itself after locking.
+     __ inc_held_monitor_count(rthread);
+   }
    __ bind(*op->stub()->continuation());
+   if (op->code() == lir_unlock) {
+     // The slow-path unlock is a JRT_LEAF stub, so no deoptimization can happen
+     // there; decrementing after the continuation covers both paths.
+     __ dec_held_monitor_count(rthread);
+   }
  }
  
  
  void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
    ciMethod* method = op->profiled_method();

*** 2900,10 ***
--- 2914,11 ---
    }
  
    if (info != NULL) {
      add_call_info_here(info);
    }
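+   // Marker nop for this return site as well (see the call emitters above).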
+   __ post_call_nop();
  }
  
  void LIR_Assembler::volatile_move_op(LIR_Opr src, LIR_Opr dest, BasicType type, CodeEmitInfo* info) {
    if (dest->is_address() || src->is_address()) {
      move_op(src, dest, type, lir_patch_none, info,