src/hotspot/cpu/x86/interp_masm_x86.cpp

 309   save_bcp();
 310 #ifdef ASSERT
 311   {
 312     Label L;
 313     cmpptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD);
 314     jcc(Assembler::equal, L);
 315     stop("InterpreterMacroAssembler::call_VM_base:"
 316          " last_sp isn't null");
 317     bind(L);
 318   }
 319 #endif /* ASSERT */
 320   // super call
 321   MacroAssembler::call_VM_base(oop_result, last_java_sp,
 322                                entry_point, number_of_arguments,
 323                                check_exceptions);
 324   // interpreter specific
 325   restore_bcp();
 326   restore_locals();
 327 }
 328 
 329 void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
 330                                                     address entry_point,
 331                                                     Register arg_1) {
 332   assert(arg_1 == c_rarg1, "");
 333   Label resume_pc, not_preempted;
 334 
 335 #ifdef ASSERT
 336   {
 337     Label L;
 338     cmpptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
 339     jcc(Assembler::equal, L);
 340     stop("Should not have alternate return address set");
 341     bind(L);
 342   }
 343 #endif /* ASSERT */
 344 
 345   // Force freeze slow path.
 346   push_cont_fastpath();
 347 
 348   // Make VM call. In case of preemption set last_pc to the one we want to resume to.
 349   // Note: call_VM_helper requires last_Java_pc for anchor to be at the top of the stack.
 350   lea(rscratch1, resume_pc);
 351   push(rscratch1);
 352   MacroAssembler::call_VM_helper(oop_result, entry_point, 1, false /*check_exceptions*/);
 353   pop(rscratch1);
 354 
 355   pop_cont_fastpath();
 356 
 357   // Check if preempted.
 358   movptr(rscratch1, Address(r15_thread, JavaThread::preempt_alternate_return_offset()));
 359   cmpptr(rscratch1, NULL_WORD);
 360   jccb(Assembler::zero, not_preempted);
 361   movptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
 362   jmp(rscratch1);
 363 
 364   // In case of preemption, this is where we will resume once we finally acquire the monitor.
 365   bind(resume_pc);
 366   restore_after_resume(false /* is_native */);
 367 
 368   bind(not_preempted);
 369 }
 370 
 371 void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
 372   lea(rscratch1, ExternalAddress(Interpreter::cont_resume_interpreter_adapter()));
 373   call(rscratch1);
 374   if (is_native) {
 375     // On resume we need to set up stack as expected.
 376     push(dtos);
 377     push(ltos);
 378   }
 379 }
 380 
 381 void InterpreterMacroAssembler::check_and_handle_popframe() {
 382   if (JvmtiExport::can_pop_frame()) {
 383     Label L;
 384     // Initiate popframe handling only if it is not already being
 385     // processed.  If the flag has the popframe_processing bit set, it
 386     // means that this code is called *during* popframe handling - we
 387     // don't want to reenter.
 388     // This method is only called just after the call into the vm in

 783 // Apply stack watermark barrier.
 784 // Notify JVMTI.
 785 // Remove the activation from the stack.
 786 //
 787 // If there are locked Java monitors
 788 //    If throw_monitor_exception
 789 //       throws IllegalMonitorStateException
 790 //    Else if install_monitor_exception
 791 //       installs IllegalMonitorStateException
 792 //    Else
 793 //       no error processing
 794 void InterpreterMacroAssembler::remove_activation(TosState state,
 795                                                   Register ret_addr,
 796                                                   bool throw_monitor_exception,
 797                                                   bool install_monitor_exception,
 798                                                   bool notify_jvmdi) {
 799   // Note: Registers rdx and xmm0 may be in use for the
 800   // result check if this is a synchronized method
 801   Label unlocked, unlock, no_unlock;
 802 
 803   const Register rthread = r15_thread;
 804   const Register robj    = c_rarg1;
 805   const Register rmon    = c_rarg1;
 806 
 807   // get the value of _do_not_unlock_if_synchronized into rbx
 808   const Address do_not_unlock_if_synchronized(rthread,
 809     in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
 810   movbool(rbx, do_not_unlock_if_synchronized);
 811   movbool(do_not_unlock_if_synchronized, false); // reset the flag
 812 
 813   // get method access flags
 814   movptr(rcx, Address(rbp, frame::interpreter_frame_method_offset * wordSize));
 815   load_unsigned_short(rcx, Address(rcx, Method::access_flags_offset()));
 816   testl(rcx, JVM_ACC_SYNCHRONIZED);
 817   jcc(Assembler::zero, unlocked);
 818 
 819   // Don't unlock anything if the _do_not_unlock_if_synchronized flag
 820   // is set.
 821   testbool(rbx);
 822   jcc(Assembler::notZero, no_unlock);

 309   save_bcp();
 310 #ifdef ASSERT
 311   {
 312     Label L;
 313     cmpptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD);
 314     jcc(Assembler::equal, L);
 315     stop("InterpreterMacroAssembler::call_VM_base:"
 316          " last_sp isn't null");
 317     bind(L);
 318   }
 319 #endif /* ASSERT */
 320   // super call
 321   MacroAssembler::call_VM_base(oop_result, last_java_sp,
 322                                entry_point, number_of_arguments,
 323                                check_exceptions);
 324   // interpreter specific
 325   restore_bcp();
 326   restore_locals();
 327 }
 328 
 329 void InterpreterMacroAssembler::call_VM_preemptable_helper(Register oop_result,
 330                                                            address entry_point,
 331                                                            int number_of_arguments,
 332                                                            bool check_exceptions) {
 333   assert(InterpreterRuntime::is_preemptable_call(entry_point), "VM call not preemptable, should use call_VM()");
 334   Label resume_pc, not_preempted;
 335 
 336 #ifdef ASSERT
 337   {
 338     Label L1, L2;
 339     cmpptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
 340     jcc(Assembler::equal, L1);
 341     stop("call_VM_preemptable_helper: should not have alternate return address set");
 342     bind(L1);
 343     // We check this counter in patch_return_pc_with_preempt_stub() during freeze.
 344     incrementl(Address(r15_thread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
 345     cmpl(Address(r15_thread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()), 0);
 346     jcc(Assembler::greater, L2);
 347     stop("call_VM_preemptable_helper: should be > 0");
 348     bind(L2);
 349   }
 350 #endif /* ASSERT */
 351 
 352   // Force freeze slow path.
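       // push_cont_fastpath() raises the thread's cont_fastpath watermark so that a freeze
       // triggered while we are inside the VM call takes the slow path; the matching
       // pop_cont_fastpath() below undoes this once the call returns.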
 353   push_cont_fastpath();
 354 
 355   // Make VM call. In case of preemption set last_pc to the one we want to resume to.
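       // The pushed resume_pc acts as this frame's last pc: rax is loaded with the sp value
       // from before the push and passed to call_VM_base() as last_Java_sp, so resume_pc
       // sits where the frame's return address is expected if a freeze walks the stack.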
 356   lea(rscratch1, resume_pc);
 357   push(rscratch1);
 358   lea(rax, Address(rsp, wordSize));
 359   call_VM_base(noreg, rax, entry_point, number_of_arguments, false);
 360   pop(rscratch1);
 361 
 362   pop_cont_fastpath();
 363 
 364 #ifdef ASSERT
 365   {
 366     Label L;
 367     decrementl(Address(r15_thread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
 368     cmpl(Address(r15_thread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()), 0);
 369     jcc(Assembler::greaterEqual, L);
 370     stop("call_VM_preemptable_helper: should be >= 0");
 371     bind(L);
 372   }
 373 #endif /* ASSERT */
 374 
 375   // Check if preempted.
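       // A non-null preempt_alternate_return means the thread was preempted inside the VM
       // call: clear the field and jump to the stored address instead of falling through.
       // The thread later continues at resume_pc (bound just below) once the monitor is
       // finally acquired.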
 376   movptr(rscratch1, Address(r15_thread, JavaThread::preempt_alternate_return_offset()));
 377   cmpptr(rscratch1, NULL_WORD);
 378   jccb(Assembler::zero, not_preempted);
 379   movptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
 380   jmp(rscratch1);
 381 
 382   // In case of preemption, this is where we will resume once we finally acquire the monitor.
 383   bind(resume_pc);
 384   restore_after_resume(false /* is_native */);
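       // restore_after_resume() (see below) calls the cont_resume_interpreter_adapter stub,
       // which is expected to re-establish the interpreter state of this frame before we
       // fall through to not_preempted.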
 385 
 386   bind(not_preempted);
 387   if (check_exceptions) {
 388     // check for pending exceptions (java_thread is set upon return)
 389     cmpptr(Address(r15_thread, Thread::pending_exception_offset()), NULL_WORD);
 390     Label ok;
 391     jcc(Assembler::equal, ok);
 392     // Exception stub expects return pc to be at top of stack. We only need
 393     // it to check Interpreter::contains(return_address) so anything will do.
 394     lea(rscratch1, resume_pc);
 395     push(rscratch1);
 396     jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
 397     bind(ok);
 398   }
 399 
 400   // get oop result if there is one and reset the value in the thread
 401   if (oop_result->is_valid()) {
 402     get_vm_result_oop(oop_result);
 403   }
 404 }
 405 
 406 static void pass_arg1(MacroAssembler* masm, Register arg) {
 407   if (c_rarg1 != arg ) {
 408     masm->mov(c_rarg1, arg);
 409   }
 410 }
 411 
 412 static void pass_arg2(MacroAssembler* masm, Register arg) {
 413   if (c_rarg2 != arg ) {
 414     masm->mov(c_rarg2, arg);
 415   }
 416 }
 417 
 418 void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
 419                                                     address entry_point,
 420                                                     Register arg_1,
 421                                                     bool check_exceptions) {
 422   pass_arg1(this, arg_1);
 423   call_VM_preemptable_helper(oop_result, entry_point, 1, check_exceptions);
 424 }
 425 
 426 void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
 427                                                     address entry_point,
 428                                                     Register arg_1,
 429                                                     Register arg_2,
 430                                                     bool check_exceptions) {
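       // arg_2 is moved into c_rarg2 before arg_1 is moved into c_rarg1, so arg_1 must not
       // already live in c_rarg2 or pass_arg2() would clobber it.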
 431   LP64_ONLY(assert_different_registers(arg_1, c_rarg2));
 432   pass_arg2(this, arg_2);
 433   pass_arg1(this, arg_1);
 434   call_VM_preemptable_helper(oop_result, entry_point, 2, check_exceptions);
 435 }
 436 
 437 void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
 438   lea(rscratch1, ExternalAddress(Interpreter::cont_resume_interpreter_adapter()));
 439   call(rscratch1);
 440   if (is_native) {
 441     // On resume we need to set up stack as expected.
 442     push(dtos);
 443     push(ltos);
 444   }
 445 }
 446 
 447 void InterpreterMacroAssembler::check_and_handle_popframe() {
 448   if (JvmtiExport::can_pop_frame()) {
 449     Label L;
 450     // Initiate popframe handling only if it is not already being
 451     // processed.  If the flag has the popframe_processing bit set, it
 452     // means that this code is called *during* popframe handling - we
 453     // don't want to reenter.
 454     // This method is only called just after the call into the vm in

 849 // Apply stack watermark barrier.
 850 // Notify JVMTI.
 851 // Remove the activation from the stack.
 852 //
 853 // If there are locked Java monitors
 854 //    If throw_monitor_exception
 855 //       throws IllegalMonitorStateException
 856 //    Else if install_monitor_exception
 857 //       installs IllegalMonitorStateException
 858 //    Else
 859 //       no error processing
 860 void InterpreterMacroAssembler::remove_activation(TosState state,
 861                                                   Register ret_addr,
 862                                                   bool throw_monitor_exception,
 863                                                   bool install_monitor_exception,
 864                                                   bool notify_jvmdi) {
 865   // Note: Registers rdx and xmm0 may be in use for the
 866   // result check if this is a synchronized method
 867   Label unlocked, unlock, no_unlock;
 868 
 869 #ifdef ASSERT
 870   Label not_preempted;
 871   cmpptr(Address(r15_thread, JavaThread::preempt_alternate_return_offset()), NULL_WORD);
 872   jcc(Assembler::equal, not_preempted);
 873   stop("remove_activation: should not have alternate return address set");
 874   bind(not_preempted);
 875 #endif /* ASSERT */
 876 
 877   const Register rthread = r15_thread;
 878   const Register robj    = c_rarg1;
 879   const Register rmon    = c_rarg1;
 880 
 881   // get the value of _do_not_unlock_if_synchronized into rbx
 882   const Address do_not_unlock_if_synchronized(rthread,
 883     in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
 884   movbool(rbx, do_not_unlock_if_synchronized);
 885   movbool(do_not_unlock_if_synchronized, false); // reset the flag
 886 
 887   // get method access flags
 888   movptr(rcx, Address(rbp, frame::interpreter_frame_method_offset * wordSize));
 889   load_unsigned_short(rcx, Address(rcx, Method::access_flags_offset()));
 890   testl(rcx, JVM_ACC_SYNCHRONIZED);
 891   jcc(Assembler::zero, unlocked);
 892 
 893   // Don't unlock anything if the _do_not_unlock_if_synchronized flag
 894   // is set.
 895   testbool(rbx);
 896   jcc(Assembler::notZero, no_unlock);