src/hotspot/cpu/riscv/interp_masm_riscv.cpp
// t0, t1 (temp regs)
void InterpreterMacroAssembler::lock_object(Register lock_reg)
{
assert(lock_reg == c_rarg1, "The argument is only for looks. It must be c_rarg1");
if (LockingMode == LM_MONITOR) {
- call_VM(noreg,
+ call_VM_preemptable(noreg,
CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
lock_reg);
} else {
Label count, done;
bnez(tmp, slow_case);
}
if (LockingMode == LM_LIGHTWEIGHT) {
lightweight_lock(lock_reg, obj_reg, tmp, tmp2, tmp3, slow_case);
- j(count);
+ j(done);
} else if (LockingMode == LM_LEGACY) {
// Load (object->mark() | 1) into swap_reg
ld(t0, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
ori(swap_reg, t0, 1);
mv(t0, (int64_t)(7 - (int)os::vm_page_size()));
andr(swap_reg, swap_reg, t0);
// Save the test result; for the recursive case, the result is zero
sd(swap_reg, Address(lock_reg, mark_offset));
- beqz(swap_reg, count);
+ bnez(swap_reg, slow_case);
+
+ bind(count);
+ inc_held_monitor_count();
+ j(done);
}
bind(slow_case);
// Call the runtime routine for slow case
- call_VM(noreg,
+ call_VM_preemptable(noreg,
CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
lock_reg);
- j(done);
-
- bind(count);
- increment(Address(xthread, JavaThread::held_monitor_count_offset()));
bind(done);
}
}
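
The masked test in the LM_LEGACY path above is compact enough to deserve a worked example. In the full routine the contended mark word first has the stack pointer subtracted from it (those lines are elided from this hunk), so the value stored at mark_offset is ((mark - sp) & (7 - os::vm_page_size())). A minimal stand-alone sketch (plain C++ with illustrative names, not HotSpot code) of why that expression is zero exactly for a recursive stack lock:

#include <cassert>
#include <cstdint>

// Zero iff mark is 8-byte aligned and sp <= mark < sp + page_size,
// i.e. the mark points at a displaced-header slot on this thread's
// own stack, which is how a recursively held legacy lock looks.
static bool is_recursive_stack_lock(uintptr_t mark, uintptr_t sp,
                                    uintptr_t page_size /* power of two */) {
  const intptr_t mask = 7 - (intptr_t)page_size; // low 3 bits + bits above the page
  return ((intptr_t)(mark - sp) & mask) == 0;
}

int main() {
  const uintptr_t page = 4096;
  const uintptr_t sp   = 0x7f0000010000;
  assert(is_recursive_stack_lock(sp + 0x80, sp, page));   // aligned, within the page
  assert(!is_recursive_stack_lock(sp + 0x84, sp, page));  // misaligned
  assert(!is_recursive_stack_lock(sp + page, sp, page));  // off the page
  return 0;
}

A non-zero result falls through to slow_case; a zero result reaches bind(count), which now bumps the held monitor count via the helper instead of a raw increment.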
ld(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset()));
// Free entry
sd(zr, Address(lock_reg, BasicObjectLock::obj_offset()));
+ Label slow_case;
if (LockingMode == LM_LIGHTWEIGHT) {
- Label slow_case;
lightweight_unlock(obj_reg, header_reg, swap_reg, tmp_reg, slow_case);
- j(count);
-
- bind(slow_case);
+ j(done);
} else if (LockingMode == LM_LEGACY) {
// Load the old header from BasicLock structure
ld(header_reg, Address(swap_reg,
BasicLock::displaced_header_offset_in_bytes()));
// Test for recursion
beqz(header_reg, count);
// Atomic swap back the old header
- cmpxchg_obj_header(swap_reg, header_reg, obj_reg, tmp_reg, count, /*fallthrough*/nullptr);
+ cmpxchg_obj_header(swap_reg, header_reg, obj_reg, tmp_reg, count, &slow_case);
+
+ bind(count);
+ dec_held_monitor_count();
+ j(done);
}
+ bind(slow_case);
// Call the runtime routine for slow case.
sd(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset())); // restore obj
call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit), lock_reg);
- j(done);
-
- bind(count);
- decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
-
bind(done);
-
restore_bcp();
}
}
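
Both fast paths now route the monitor bookkeeping through inc_held_monitor_count()/dec_held_monitor_count() rather than incrementing or decrementing JavaThread's held_monitor_count field directly (the removed lines above). The invariant they maintain can be modeled in a few lines of plain C++ (a sketch with hypothetical names; how the runtime actually consumes the count is outside this hunk):

#include <cassert>
#include <cstdint>

// Simplified model: each fast-path lock bumps the per-thread count and
// each fast-path unlock drops it, so the count stays balanced across
// lock_object/unlock_object and reflects how many monitors the thread holds.
struct ThreadModel {
  int64_t held_monitor_count = 0;
  void inc_held_monitor_count() { held_monitor_count++; }
  void dec_held_monitor_count() {
    assert(held_monitor_count > 0);  // never unlock more than was locked
    held_monitor_count--;
  }
};

int main() {
  ThreadModel t;
  t.inc_held_monitor_count();            // lock_object fast path
  t.inc_held_monitor_count();            // recursive lock
  t.dec_held_monitor_count();            // unlock_object fast path
  t.dec_held_monitor_count();
  assert(t.held_monitor_count == 0);     // balanced on exit
  return 0;
}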
// interpreter specific
restore_bcp();
restore_locals();
}
+ void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
+ address entry_point,
+ Register arg_1) {
+ assert(arg_1 == c_rarg1, "");
+ Label resume_pc, not_preempted;
+
+ push_cont_fastpath();
+
+ // Make the VM call. In case of preemption, set last_Java_pc to
+ // the address we want to resume at.
+ la(t0, resume_pc);
+ sd(t0, Address(xthread, JavaThread::last_Java_pc_offset()));
+ call_VM_base(oop_result, noreg, noreg, entry_point, 1, false /*check_exceptions*/);
+
+ pop_cont_fastpath();
+
+ // Check if preempted
+ ld(t0, Address(xthread, JavaThread::preempt_alternate_return_offset()));
+ beqz(t0, not_preempted);
+ sd(zr, Address(xthread, JavaThread::preempt_alternate_return_offset()));
+ jr(t0);
+
+ bind(resume_pc);
+ restore_after_resume(false /* is_native */);
+
+ bind(not_preempted);
+ }
+
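The contract of call_VM_preemptable can be restated in a few lines of plain C++ (a sketch under assumed semantics, not HotSpot code): record resume_pc as the thread's last Java pc before the call; afterwards, a non-zero preempt_alternate_return means the thread was preempted and control must continue at that address, clearing the field first; otherwise execution falls through to not_preempted. When the frozen continuation is eventually resumed, execution continues at resume_pc, which runs restore_after_resume.

#include <cassert>
#include <cstdint>

struct ThreadModel {
  uintptr_t last_Java_pc = 0;
  uintptr_t preempt_alternate_return = 0;
};

// Returns the pc execution should continue at after the VM call.
static uintptr_t call_vm_preemptable_model(ThreadModel* t,
                                           uintptr_t resume_pc,
                                           uintptr_t fallthrough_pc,
                                           void (*vm_entry)(ThreadModel*)) {
  t->last_Java_pc = resume_pc;        // la/sd of resume_pc above
  vm_entry(t);                        // call_VM_base; may preempt
  uintptr_t alt = t->preempt_alternate_return;
  if (alt != 0) {                     // preempted
    t->preempt_alternate_return = 0;  // sd(zr, ...) above
    return alt;                       // jr(t0) above
  }
  return fallthrough_pc;              // beqz to not_preempted above
}

int main() {
  ThreadModel t;
  auto normal  = [](ThreadModel*) {};
  auto preempt = [](ThreadModel* t) { t->preempt_alternate_return = 0x42; };
  assert(call_vm_preemptable_model(&t, 0x10, 0x20, normal)  == 0x20);
  assert(call_vm_preemptable_model(&t, 0x10, 0x20, preempt) == 0x42);
  assert(t.preempt_alternate_return == 0);  // cleared after use
  return 0;
}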
+ void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
+ la(t0, ExternalAddress(Interpreter::cont_resume_interpreter_adapter()));
+ jalr(t0);
+ if (is_native) {
+ // On resume we need to set up the stack as expected
+ push(dtos);
+ push(ltos);
+ }
+ }
+
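A usage note on the is_native flag, inferred from the comment above and hedged accordingly: a native method's result may live in either the floating-point or the integer top-of-stack slot, so on resume both dtos and ltos are pushed to re-establish the stack the native entry expects. The exact contract is defined by Interpreter::cont_resume_interpreter_adapter(), which is not part of this hunk; call_VM_preemptable above passes is_native == false, so the interpreted monitorenter case pushes nothing.
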
void InterpreterMacroAssembler::profile_obj_type(Register obj, const Address& mdo_addr, Register tmp) {
assert_different_registers(obj, tmp, t0, mdo_addr.base());
Label update, next, none;
verify_oop(obj);