src/hotspot/cpu/x86/interp_masm_x86.cpp
// interpreter specific
restore_bcp();
restore_locals();
}
! void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
! address entry_point,
! Register arg_1) {
! assert(arg_1 == c_rarg1, "");
Label resume_pc, not_preempted;
#ifdef ASSERT
{
! Label L;
cmpptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
! jcc(Assembler::equal, L);
! stop("Should not have alternate return address set");
! bind(L);
}
#endif /* ASSERT */
// Force freeze slow path.
push_cont_fastpath();
// Make VM call. In case of preemption set last_pc to the one we want to resume to.
- // Note: call_VM_helper requires last_Java_pc for anchor to be at the top of the stack.
lea(rscratch1, resume_pc);
push(rscratch1);
! MacroAssembler::call_VM_helper(oop_result, entry_point, 1, false /*check_exceptions*/);
pop(rscratch1);
pop_cont_fastpath();
// Check if preempted.
movptr(rscratch1, Address(r15_thread, JavaThread::preempt_alternate_return_offset()));
cmpptr(rscratch1, NULL_WORD);
jccb(Assembler::zero, not_preempted);
movptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
// interpreter specific
restore_bcp();
restore_locals();
}
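The single-argument version above is replaced below by call_VM_preemptable_helper(), with thin one- and two-argument call_VM_preemptable() wrappers that marshal arguments into the ABI registers.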
! void InterpreterMacroAssembler::call_VM_preemptable_helper(Register oop_result,
! address entry_point,
! int number_of_arguments,
! bool check_exceptions) {
+ assert(InterpreterRuntime::is_preemptable_call(entry_point), "VM call not preemptable, should use call_VM()");
Label resume_pc, not_preempted;
#ifdef ASSERT
{
! Label L1, L2;
cmpptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
! jcc(Assembler::equal, L1);
! stop("call_VM_preemptable_helper: should not have alternate return address set");
! bind(L1);
+ // We check this counter in patch_return_pc_with_preempt_stub() during freeze.
+ incrementl(Address(r15_thread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
+ cmpl(Address(r15_thread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()), 0);
+ jcc(Assembler::greater, L2);
+ stop("call_VM_preemptable_helper: should be > 0");
+ bind(L2);
}
#endif /* ASSERT */
// Force freeze slow path.
push_cont_fastpath();
// Make VM call. In case of preemption set last_pc to the one we want to resume to.
lea(rscratch1, resume_pc);
push(rscratch1);
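+ // The last_Java_sp passed to call_VM_base (rax below) is one word above the
+ // pushed resume_pc, matching MacroAssembler::call_VM_helper(), which expects
+ // last_Java_pc for the anchor to be at the top of the stack.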
! lea(rax, Address(rsp, wordSize));
+ call_VM_base(noreg, rax, entry_point, number_of_arguments, false /*check_exceptions*/);
pop(rscratch1);
pop_cont_fastpath();
+ #ifdef ASSERT
+ {
+ Label L;
+ decrementl(Address(r15_thread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
+ cmpl(Address(r15_thread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()), 0);
+ jcc(Assembler::greaterEqual, L);
+ stop("call_VM_preemptable_helper: should be >= 0");
+ bind(L);
+ }
+ #endif /* ASSERT */
+
// Check if preempted.
movptr(rscratch1, Address(r15_thread, JavaThread::preempt_alternate_return_offset()));
cmpptr(rscratch1, NULL_WORD);
jccb(Assembler::zero, not_preempted);
movptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
// In case of preemption, this is where we will resume execution once we finally
// acquire the monitor (or complete whatever blocking call we were preempted in).
bind(resume_pc);
restore_after_resume(false /* is_native */);
bind(not_preempted);
+ if (check_exceptions) {
+ // check for pending exceptions (java_thread is set upon return)
+ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), NULL_WORD);
+ Label ok;
+ jcc(Assembler::equal, ok);
+ // Exception stub expects return pc to be at top of stack. We only need
+ // it to check Interpreter::contains(return_address) so anything will do.
+ lea(rscratch1, resume_pc);
+ push(rscratch1);
+ jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
+ bind(ok);
+ }
+
+ // get oop result if there is one and reset the value in the thread
+ if (oop_result->is_valid()) {
+ get_vm_result_oop(oop_result);
+ }
+ }
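Taken together: resume_pc is pushed so that it becomes the frame-anchor pc for the VM call; if the thread is preempted inside the call, this is where execution eventually resumes, with restore_after_resume() re-establishing bcp and locals. On a plain return, preempt_alternate_return_offset() still holds NULL_WORD and the code branches straight to not_preempted, after which pending exceptions and the oop result are handled as for an ordinary call_VM.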
+
+ static void pass_arg1(MacroAssembler* masm, Register arg) {
+ if (c_rarg1 != arg) {
+ masm->mov(c_rarg1, arg);
+ }
+ }
+
+ static void pass_arg2(MacroAssembler* masm, Register arg) {
+ if (c_rarg2 != arg) {
+ masm->mov(c_rarg2, arg);
+ }
+ }
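These file-scope helpers mirror the pass_arg* helpers in macroAssembler_x86.cpp: the move into the ABI argument register is elided when the value is already in place.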
+
+ void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
+ address entry_point,
+ Register arg_1,
+ bool check_exceptions) {
+ pass_arg1(this, arg_1);
+ call_VM_preemptable_helper(oop_result, entry_point, 1, check_exceptions);
+ }
+
+ void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
+ address entry_point,
+ Register arg_1,
+ Register arg_2,
+ bool check_exceptions) {
+ LP64_ONLY(assert_different_registers(arg_1, c_rarg2));
+ pass_arg2(this, arg_2);
+ pass_arg1(this, arg_1);
+ call_VM_preemptable_helper(oop_result, entry_point, 2, check_exceptions);
}
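For context, a hedged sketch of a call site, modeled on the interpreter's monitor-enter slow path (register and flag values are illustrative, not taken from this patch):

// Entering a monitor may block, so the interpreter routes the call through
// the preemptable path; lock_reg is assumed to hold the BasicObjectLock.
call_VM_preemptable(noreg, // no oop result expected
                    CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
                    lock_reg,
                    false /*check_exceptions*/);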
void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
lea(rscratch1, ExternalAddress(Interpreter::cont_resume_interpreter_adapter()));
call(rscratch1);
void InterpreterMacroAssembler::remove_activation(TosState state,
                                                  Register ret_addr,
                                                  bool throw_monitor_exception,
                                                  bool install_monitor_exception,
                                                  bool notify_jvmdi) {
// Note: registers rdx and xmm0 may hold the method result and must be
// preserved across the synchronized-method check below.
Label unlocked, unlock, no_unlock;
+ #ifdef ASSERT
+ Label not_preempted;
+ cmpptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
+ jcc(Assembler::equal, not_preempted);
+ stop("remove_activation: should not have alternate return address set");
+ bind(not_preempted);
+ #endif /* ASSERT */
+
const Register rthread = r15_thread;
const Register robj = c_rarg1;
const Register rmon = c_rarg1;
// get the value of _do_not_unlock_if_synchronized into rdx