src/hotspot/cpu/riscv/interp_masm_riscv.cpp
bool notify_jvmdi) {
// Note: Register x13 may be in use for the
// result check if the method is synchronized.
Label unlocked, unlock, no_unlock;
+ #ifdef ASSERT
+ Label not_preempted;
+ ld(t0, Address(xthread, JavaThread::preempt_alternate_return_offset()));
+ beqz(t0, not_preempted);
+ stop("remove_activation: should not have alternate return address set");
+ bind(not_preempted);
+ #endif /* ASSERT */
+
// get the value of _do_not_unlock_if_synchronized into x13
const Address do_not_unlock_if_synchronized(xthread,
in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
lbu(x13, do_not_unlock_if_synchronized);
sb(zr, do_not_unlock_if_synchronized); // reset the flag
}
void InterpreterMacroAssembler::call_VM_base(Register oop_result,
Register java_thread,
Register last_java_sp,
+ Label* return_pc,
address entry_point,
int number_of_arguments,
bool check_exceptions) {
// interpreter specific
//
bind(L);
}
#endif /* ASSERT */
// super call
MacroAssembler::call_VM_base(oop_result, noreg, last_java_sp,
! entry_point, number_of_arguments,
! check_exceptions);
! // interpreter specific
restore_bcp();
restore_locals();
}
! void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
! address entry_point,
! Register arg_1) {
! assert(arg_1 == c_rarg1, "");
Label resume_pc, not_preempted;
#ifdef ASSERT
{
! Label L;
ld(t0, Address(xthread, JavaThread::preempt_alternate_return_offset()));
! beqz(t0, L);
! stop("Should not have alternate return address set");
! bind(L);
}
#endif /* ASSERT */
// Force freeze slow path.
push_cont_fastpath();
// Make the VM call. In case of preemption, set last_Java_pc to the pc we want to resume at.
! la(t0, resume_pc);
! sd(t0, Address(xthread, JavaThread::last_Java_pc_offset()));
- call_VM_base(oop_result, noreg, noreg, entry_point, 1, false /*check_exceptions*/);
pop_cont_fastpath();
// Check if preempted.
ld(t1, Address(xthread, JavaThread::preempt_alternate_return_offset()));
beqz(t1, not_preempted);
sd(zr, Address(xthread, JavaThread::preempt_alternate_return_offset()));
jr(t1);
bind(L);
}
#endif /* ASSERT */
// super call
MacroAssembler::call_VM_base(oop_result, noreg, last_java_sp,
! return_pc, entry_point,
! number_of_arguments, check_exceptions);
! // interpreter specific
restore_bcp();
restore_locals();
}
! void InterpreterMacroAssembler::call_VM_preemptable_helper(Register oop_result,
! address entry_point,
! int number_of_arguments,
! bool check_exceptions) {
+ assert(InterpreterRuntime::is_preemptable_call(entry_point),
+ "VM call not preemptable, should use call_VM()");
Label resume_pc, not_preempted;
#ifdef ASSERT
{
! Label L1, L2;
ld(t0, Address(xthread, JavaThread::preempt_alternate_return_offset()));
! beqz(t0, L1);
! stop("call_VM_preemptable_helper: Should not have alternate return address set");
! bind(L1);
+ // We check this counter in patch_return_pc_with_preempt_stub() during freeze.
+ incrementw(Address(xthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
+ lw(t0, Address(xthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
+ bgtz(t0, L2);
+ stop("call_VM_preemptable_helper: should be > 0");
+ bind(L2);
}
#endif /* ASSERT */
// Force freeze slow path.
push_cont_fastpath();
// Make the VM call. In case of preemption, set last_Java_pc to the pc we want to resume at.
! // Note: call_VM_base uses the resume_pc label to set last_Java_pc.
! call_VM_base(noreg, noreg, noreg, &resume_pc, entry_point, number_of_arguments, false /*check_exceptions*/);
pop_cont_fastpath();
+ #ifdef ASSERT
+ {
+ Label L;
+ decrementw(Address(xthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
+ lw(t0, Address(xthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
+ bgez(t0, L);
+ stop("call_VM_preemptable_helper: should be >= 0");
+ bind(L);
+ }
+ #endif /* ASSERT */
+
// Check if preempted.
ld(t1, Address(xthread, JavaThread::preempt_alternate_return_offset()));
beqz(t1, not_preempted);
sd(zr, Address(xthread, JavaThread::preempt_alternate_return_offset()));
jr(t1);
// In case of preemption, this is where we will resume once we finally acquire the monitor.
bind(resume_pc);
restore_after_resume(false /* is_native */);
bind(not_preempted);
+ if (check_exceptions) {
+ // check for pending exceptions
+ ld(t0, Address(xthread, in_bytes(Thread::pending_exception_offset())));
+ Label ok;
+ beqz(t0, ok);
+ la(t1, RuntimeAddress(StubRoutines::forward_exception_entry()));
+ jr(t1);
+ bind(ok);
+ }
+
+ // get oop result if there is one and reset the value in the thread
+ if (oop_result->is_valid()) {
+ get_vm_result_oop(oop_result, xthread);
+ }
+ }
+
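+ // Argument-shuffling helpers: move a value into the C calling-convention register
+ // it belongs in, skipping the move when it is already there.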
+ static void pass_arg1(MacroAssembler* masm, Register arg) {
+ if (c_rarg1 != arg) {
+ masm->mv(c_rarg1, arg);
+ }
+ }
+
+ static void pass_arg2(MacroAssembler* masm, Register arg) {
+ if (c_rarg2 != arg) {
+ masm->mv(c_rarg2, arg);
+ }
+ }
+
+ void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
+ address entry_point,
+ Register arg_1,
+ bool check_exceptions) {
+ pass_arg1(this, arg_1);
+ call_VM_preemptable_helper(oop_result, entry_point, 1, check_exceptions);
+ }
+
+ void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
+ address entry_point,
+ Register arg_1,
+ Register arg_2,
+ bool check_exceptions) {
+ LP64_ONLY(assert_different_registers(arg_1, c_rarg2));
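+ // Pass arg_2 into c_rarg2 first so that the move of arg_1 into c_rarg1 below
+ // cannot clobber it (arg_2 may currently live in c_rarg1).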
+ pass_arg2(this, arg_2);
+ pass_arg1(this, arg_1);
+ call_VM_preemptable_helper(oop_result, entry_point, 2, check_exceptions);
}
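For reference, a hedged sketch of a call site for the new entry point (illustrative only, not part of this change; it assumes the interpreter's monitorenter path and that c_rarg1 already holds the BasicObjectLock, as on other ports):

    // Illustrative sketch: preemptable monitorenter call from interpreter code.
    // Assumes c_rarg1 holds the BasicObjectLock* to lock.
    call_VM_preemptable(noreg /* no oop result */,
                        CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
                        c_rarg1 /* arg_1 */,
                        true /* check_exceptions */);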
void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
la(t1, ExternalAddress(Interpreter::cont_resume_interpreter_adapter()));
jalr(t1);