src/hotspot/cpu/riscv/interp_masm_riscv.cpp

 709   // If we're returning to interpreted code we will shortly be
 710   // adjusting SP to allow some space for ESP.  If we're returning to
 711   // compiled code the saved sender SP was saved in sender_sp, so this
 712   // restores it.
 713   andi(sp, esp, -16);
 714 }
 715 
 716 // Lock object
 717 //
 718 // Args:
 719 //      c_rarg1: BasicObjectLock to be used for locking
 720 //
 721 // Kills:
 722 //      x10
 723 //      c_rarg0, c_rarg1, c_rarg2, c_rarg3, c_rarg4, c_rarg5, .. (param regs)
 724 //      t0, t1 (temp regs)
 725 void InterpreterMacroAssembler::lock_object(Register lock_reg)
 726 {
 727   assert(lock_reg == c_rarg1, "The argument is only for looks. It must be c_rarg1");
 728   if (LockingMode == LM_MONITOR) {
 729     call_VM(noreg,
 730             CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
 731             lock_reg);
 732   } else {
 733     Label count, done;
 734 
 735     const Register swap_reg = x10;
 736     const Register tmp = c_rarg2;
 737     const Register obj_reg = c_rarg3; // Will contain the oop
 738     const Register tmp2 = c_rarg4;
 739     const Register tmp3 = c_rarg5;
 740 
 741     const int obj_offset = in_bytes(BasicObjectLock::obj_offset());
 742     const int lock_offset = in_bytes(BasicObjectLock::lock_offset());
 743     const int mark_offset = lock_offset +
 744                             BasicLock::displaced_header_offset_in_bytes();
 745 
 746     Label slow_case;
 747 
 748     // Load object pointer into obj_reg c_rarg3
 749     ld(obj_reg, Address(lock_reg, obj_offset));
 750 
 751     if (DiagnoseSyncOnValueBasedClasses != 0) {
 752       load_klass(tmp, obj_reg);
 753       lbu(tmp, Address(tmp, Klass::misc_flags_offset()));
 754       test_bit(tmp, tmp, exact_log2(KlassFlags::_misc_is_value_based_class));
 755       bnez(tmp, slow_case);
 756     }
 757 
 758     if (LockingMode == LM_LIGHTWEIGHT) {
 759       lightweight_lock(lock_reg, obj_reg, tmp, tmp2, tmp3, slow_case);
 760       j(count);
 761     } else if (LockingMode == LM_LEGACY) {
 762       // Load (object->mark() | 1) into swap_reg
 763       ld(t0, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
 764       ori(swap_reg, t0, 1);
 765 
 766       // Save (object->mark() | 1) into BasicLock's displaced header
 767       sd(swap_reg, Address(lock_reg, mark_offset));
 768 
 769       assert(lock_offset == 0,
 770              "displaced header must be first word in BasicObjectLock");
 771 
 772       cmpxchg_obj_header(swap_reg, lock_reg, obj_reg, tmp, count, /*fallthrough*/nullptr);
 773 
 774       // Test if the oopMark is an obvious stack pointer, i.e.,
 775       //  1) (mark & 7) == 0, and
 776       //  2) sp <= mark < sp + os::pagesize()
 777       //
 778       // These 3 tests can be done by evaluating the following
 779       // expression: ((mark - sp) & (7 - os::vm_page_size())),
 780       // assuming both stack pointer and pagesize have their
 781       // least significant 3 bits clear.
 782       // NOTE: the oopMark is in swap_reg x10 as the result of cmpxchg
 783       sub(swap_reg, swap_reg, sp);
 784       mv(t0, (int64_t)(7 - (int)os::vm_page_size()));
 785       andr(swap_reg, swap_reg, t0);
 786 
 787       // Save the test result; in the recursive case the result is zero
 788       sd(swap_reg, Address(lock_reg, mark_offset));
 789       beqz(swap_reg, count);
 790     }
 791 
 792     bind(slow_case);
 793 
 794     // Call the runtime routine for slow case
 795     call_VM(noreg,
 796             CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
 797             lock_reg);
 798     j(done);
 799 
 800     bind(count);
 801     increment(Address(xthread, JavaThread::held_monitor_count_offset()));
 802 
 803     bind(done);
 804   }
 805 }
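
The LM_LEGACY fast path above hinges on the single masked subtraction described in the comment. A minimal C++ sketch of that check follows; the function name, parameter types, and alignment assumptions are illustrative only, not HotSpot declarations.

#include <cstdint>

// Models the 'sub' + 'andr' pair in the assembly: the result is zero
// exactly when 'mark' is 8-byte aligned and lies in [sp, sp + page_size),
// i.e. it is plausibly the address of a BasicLock on the current stack.
// Assumes sp is 8-byte aligned and page_size is a power of two >= 8.
static bool mark_points_into_current_stack(uintptr_t mark, uintptr_t sp,
                                           uintptr_t page_size) {
  // Low 3 bits of (mark - sp) clear  => mark is 8-aligned.
  // Bits >= page_size clear          => 0 <= mark - sp < page_size.
  return ((mark - sp) & (7 - page_size)) == 0;
}

When the masked value is zero the lock is a recursive re-entry by the current thread, and the zero stored into the displaced-header slot marks it as such; a non-zero value means the header belongs to a real mark word or another thread, so execution continues in slow_case.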
 806 
 807 
 808 // Unlocks an object. Used in monitorexit bytecode and
 809 // remove_activation.  Throws an IllegalMonitorStateException if object is
 810 // not locked by current thread.
 811 //
 812 // Args:
 813 //      c_rarg1: BasicObjectLock for lock
 814 //
 815 // Kills:
 816 //      x10
 817 //      c_rarg0, c_rarg1, c_rarg2, c_rarg3, c_rarg4, ... (param regs)
 818 //      t0, t1 (temp regs)
 819 void InterpreterMacroAssembler::unlock_object(Register lock_reg)
 820 {
 821   assert(lock_reg == c_rarg1, "The argument is only for looks. It must be c_rarg1");

 827 
 828     const Register swap_reg   = x10;
 829     const Register header_reg = c_rarg2;  // Will contain the old oopMark
 830     const Register obj_reg    = c_rarg3;  // Will contain the oop
 831     const Register tmp_reg    = c_rarg4;  // Temporary used by lightweight_unlock
 832 
 833     save_bcp(); // Save in case of exception
 834 
 835     if (LockingMode != LM_LIGHTWEIGHT) {
 836       // Convert from BasicObjectLock structure to object and BasicLock
 837       // structure. Store the BasicLock address into x10
 838       la(swap_reg, Address(lock_reg, BasicObjectLock::lock_offset()));
 839     }
 840 
 841     // Load oop into obj_reg(c_rarg3)
 842     ld(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset()));
 843 
 844     // Free entry
 845     sd(zr, Address(lock_reg, BasicObjectLock::obj_offset()));
 846 

 847     if (LockingMode == LM_LIGHTWEIGHT) {
 848       Label slow_case;
 849       lightweight_unlock(obj_reg, header_reg, swap_reg, tmp_reg, slow_case);
 850       j(count);
 851 
 852       bind(slow_case);
 853     } else if (LockingMode == LM_LEGACY) {
 854       // Load the old header from BasicLock structure
 855       ld(header_reg, Address(swap_reg,
 856                              BasicLock::displaced_header_offset_in_bytes()));
 857 
 858       // Test for recursion
 859       beqz(header_reg, count);
 860 
 861       // Atomic swap back the old header
 862       cmpxchg_obj_header(swap_reg, header_reg, obj_reg, tmp_reg, count, /*fallthrough*/nullptr);
 863     }
 864 

 865     // Call the runtime routine for slow case.
 866     sd(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset())); // restore obj
 867     call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit), lock_reg);
 868 
 869     j(done);
 870 
 871     bind(count);
 872     decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
 873 
 874     bind(done);
 875 
 876     restore_bcp();
 877   }
 878 }
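
For the LM_LEGACY branch of unlock_object, the displaced-header protocol can be summarised in a few lines of C++. This is a sketch under simplified assumptions (raw uintptr_t words, a GCC/Clang atomic builtin standing in for cmpxchg_obj_header); none of the names below are HotSpot API.

#include <cstdint>

// basic_lock[0] holds the displaced header saved by lock_object.
// A zero displaced header marks a recursive re-entry: nothing to restore.
// Otherwise the object's mark word should still be the BasicLock address,
// and the original header is CAS'ed back; failure means the monitor was
// inflated meanwhile, so the slow path (InterpreterRuntime::monitorexit)
// must run.
static bool legacy_fast_unlock(uintptr_t* obj_mark_addr, uintptr_t* basic_lock) {
  uintptr_t displaced = basic_lock[0];
  if (displaced == 0) {
    return true;  // recursive unlock, fast path done
  }
  uintptr_t expected = (uintptr_t)basic_lock;
  return __atomic_compare_exchange_n(obj_mark_addr, &expected, displaced,
                                     /*weak=*/false,
                                     __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

The assembly keeps the same shape: beqz(header_reg, count) handles the recursive case and cmpxchg_obj_header performs the compare-and-swap back into the object header.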
 879 
 880 
 881 void InterpreterMacroAssembler::test_method_data_pointer(Register mdp,
 882                                                          Label& zero_continue) {
 883   assert(ProfileInterpreter, "must be profiling interpreter");
 884   ld(mdp, Address(fp, frame::interpreter_frame_mdp_offset * wordSize));
 885   beqz(mdp, zero_continue);
 886 }
 887 
 888 // Set the method data pointer for the current bcp.
 889 void InterpreterMacroAssembler::set_method_data_pointer_for_bcp() {
 890   assert(ProfileInterpreter, "must be profiling interpreter");
 891   Label set_mdp;
 892   push_reg(RegSet::of(x10, x11), sp); // save x10, x11
 893 
 894   // Test MDO to avoid the call if it is null.
 895   ld(x10, Address(xmethod, in_bytes(Method::method_data_offset())));

1563   save_bcp();
1564 #ifdef ASSERT
1565   {
1566     Label L;
1567     ld(t0, Address(fp, frame::interpreter_frame_last_sp_offset * wordSize));
1568     beqz(t0, L);
1569     stop("InterpreterMacroAssembler::call_VM_base:"
1570          " last_sp isn't null");
1571     bind(L);
1572   }
1573 #endif /* ASSERT */
1574   // super call
1575   MacroAssembler::call_VM_base(oop_result, noreg, last_java_sp,
1576                                entry_point, number_of_arguments,
1577                                check_exceptions);
1578   // interpreter specific
1579   restore_bcp();
1580   restore_locals();
1581 }
1582 
1583 void InterpreterMacroAssembler::profile_obj_type(Register obj, const Address& mdo_addr, Register tmp) {
1584   assert_different_registers(obj, tmp, t0, mdo_addr.base());
1585   Label update, next, none;
1586 
1587   verify_oop(obj);
1588 
1589   bnez(obj, update);
1590   orptr(mdo_addr, TypeEntries::null_seen, t0, tmp);
1591   j(next);
1592 
1593   bind(update);
1594   load_klass(obj, obj);
1595 
1596   ld(tmp, mdo_addr);
1597   xorr(obj, obj, tmp);
1598   andi(t0, obj, TypeEntries::type_klass_mask);
1599   beqz(t0, next); // klass seen before, nothing to
1600                   // do. The unknown bit may have been
1601                   // set already but no need to check.
1602 

 709   // If we're returning to interpreted code we will shortly be
 710   // adjusting SP to allow some space for ESP.  If we're returning to
 711   // compiled code the saved sender SP was saved in sender_sp, so this
 712   // restores it.
 713   andi(sp, esp, -16);
 714 }
 715 
 716 // Lock object
 717 //
 718 // Args:
 719 //      c_rarg1: BasicObjectLock to be used for locking
 720 //
 721 // Kills:
 722 //      x10
 723 //      c_rarg0, c_rarg1, c_rarg2, c_rarg3, c_rarg4, c_rarg5, .. (param regs)
 724 //      t0, t1 (temp regs)
 725 void InterpreterMacroAssembler::lock_object(Register lock_reg)
 726 {
 727   assert(lock_reg == c_rarg1, "The argument is only for looks. It must be c_rarg1");
 728   if (LockingMode == LM_MONITOR) {
 729     call_VM_preemptable(noreg,
 730             CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
 731             lock_reg);
 732   } else {
 733     Label count, done;
 734 
 735     const Register swap_reg = x10;
 736     const Register tmp = c_rarg2;
 737     const Register obj_reg = c_rarg3; // Will contain the oop
 738     const Register tmp2 = c_rarg4;
 739     const Register tmp3 = c_rarg5;
 740 
 741     const int obj_offset = in_bytes(BasicObjectLock::obj_offset());
 742     const int lock_offset = in_bytes(BasicObjectLock::lock_offset());
 743     const int mark_offset = lock_offset +
 744                             BasicLock::displaced_header_offset_in_bytes();
 745 
 746     Label slow_case;
 747 
 748     // Load object pointer into obj_reg c_rarg3
 749     ld(obj_reg, Address(lock_reg, obj_offset));
 750 
 751     if (DiagnoseSyncOnValueBasedClasses != 0) {
 752       load_klass(tmp, obj_reg);
 753       lbu(tmp, Address(tmp, Klass::misc_flags_offset()));
 754       test_bit(tmp, tmp, exact_log2(KlassFlags::_misc_is_value_based_class));
 755       bnez(tmp, slow_case);
 756     }
 757 
 758     if (LockingMode == LM_LIGHTWEIGHT) {
 759       lightweight_lock(lock_reg, obj_reg, tmp, tmp2, tmp3, slow_case);
 760       j(done);
 761     } else if (LockingMode == LM_LEGACY) {
 762       // Load (object->mark() | 1) into swap_reg
 763       ld(t0, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
 764       ori(swap_reg, t0, 1);
 765 
 766       // Save (object->mark() | 1) into BasicLock's displaced header
 767       sd(swap_reg, Address(lock_reg, mark_offset));
 768 
 769       assert(lock_offset == 0,
 770              "displaced header must be first word in BasicObjectLock");
 771 
 772       cmpxchg_obj_header(swap_reg, lock_reg, obj_reg, tmp, count, /*fallthrough*/nullptr);
 773 
 774       // Test if the oopMark is an obvious stack pointer, i.e.,
 775       //  1) (mark & 7) == 0, and
 776       //  2) sp <= mark < sp + os::pagesize()
 777       //
 778       // These 3 tests can be done by evaluating the following
 779       // expression: ((mark - sp) & (7 - os::vm_page_size())),
 780       // assuming both stack pointer and pagesize have their
 781       // least significant 3 bits clear.
 782       // NOTE: the oopMark is in swap_reg x10 as the result of cmpxchg
 783       sub(swap_reg, swap_reg, sp);
 784       mv(t0, (int64_t)(7 - (int)os::vm_page_size()));
 785       andr(swap_reg, swap_reg, t0);
 786 
 787       // Save the test result; in the recursive case the result is zero
 788       sd(swap_reg, Address(lock_reg, mark_offset));
 789       bnez(swap_reg, slow_case);
 790 
 791       bind(count);
 792       inc_held_monitor_count();
 793       j(done);
 794     }
 795 
 796     bind(slow_case);
 797 
 798     // Call the runtime routine for slow case
 799     call_VM_preemptable(noreg,
 800             CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
 801             lock_reg);
 802 
 803     bind(done);
 804   }
 805 }
 806 
 807 
 808 // Unlocks an object. Used in monitorexit bytecode and
 809 // remove_activation.  Throws an IllegalMonitorStateException if object is
 810 // not locked by current thread.
 811 //
 812 // Args:
 813 //      c_rarg1: BasicObjectLock for lock
 814 //
 815 // Kills:
 816 //      x10
 817 //      c_rarg0, c_rarg1, c_rarg2, c_rarg3, c_rarg4, ... (param regs)
 818 //      t0, t1 (temp regs)
 819 void InterpreterMacroAssembler::unlock_object(Register lock_reg)
 820 {
 821   assert(lock_reg == c_rarg1, "The argument is only for looks. It must be c_rarg1");

 827 
 828     const Register swap_reg   = x10;
 829     const Register header_reg = c_rarg2;  // Will contain the old oopMark
 830     const Register obj_reg    = c_rarg3;  // Will contain the oop
 831     const Register tmp_reg    = c_rarg4;  // Temporary used by lightweight_unlock
 832 
 833     save_bcp(); // Save in case of exception
 834 
 835     if (LockingMode != LM_LIGHTWEIGHT) {
 836       // Convert from BasicObjectLock structure to object and BasicLock
 837       // structure. Store the BasicLock address into x10
 838       la(swap_reg, Address(lock_reg, BasicObjectLock::lock_offset()));
 839     }
 840 
 841     // Load oop into obj_reg(c_rarg3)
 842     ld(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset()));
 843 
 844     // Free entry
 845     sd(zr, Address(lock_reg, BasicObjectLock::obj_offset()));
 846 
 847     Label slow_case;
 848     if (LockingMode == LM_LIGHTWEIGHT) {
 849       lightweight_unlock(obj_reg, header_reg, swap_reg, tmp_reg, slow_case);
 850       j(done);
 851     } else if (LockingMode == LM_LEGACY) {
 852       // Load the old header from BasicLock structure
 853       ld(header_reg, Address(swap_reg,
 854                              BasicLock::displaced_header_offset_in_bytes()));
 855 
 856       // Test for recursion
 857       beqz(header_reg, count);
 858 
 859       // Atomic swap back the old header
 860       cmpxchg_obj_header(swap_reg, header_reg, obj_reg, tmp_reg, count, &slow_case);
 861 
 862       bind(count);
 863       dec_held_monitor_count();
 864       j(done);
 865     }
 866 
 867     bind(slow_case);
 868     // Call the runtime routine for slow case.
 869     sd(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset())); // restore obj
 870     call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit), lock_reg);
 871 
 872     bind(done);
 873     restore_bcp();
 874   }
 875 }
 876 
 877 
 878 void InterpreterMacroAssembler::test_method_data_pointer(Register mdp,
 879                                                          Label& zero_continue) {
 880   assert(ProfileInterpreter, "must be profiling interpreter");
 881   ld(mdp, Address(fp, frame::interpreter_frame_mdp_offset * wordSize));
 882   beqz(mdp, zero_continue);
 883 }
 884 
 885 // Set the method data pointer for the current bcp.
 886 void InterpreterMacroAssembler::set_method_data_pointer_for_bcp() {
 887   assert(ProfileInterpreter, "must be profiling interpreter");
 888   Label set_mdp;
 889   push_reg(RegSet::of(x10, x11), sp); // save x10, x11
 890 
 891   // Test MDO to avoid the call if it is null.
 892   ld(x10, Address(xmethod, in_bytes(Method::method_data_offset())));

1560   save_bcp();
1561 #ifdef ASSERT
1562   {
1563     Label L;
1564     ld(t0, Address(fp, frame::interpreter_frame_last_sp_offset * wordSize));
1565     beqz(t0, L);
1566     stop("InterpreterMacroAssembler::call_VM_base:"
1567          " last_sp isn't null");
1568     bind(L);
1569   }
1570 #endif /* ASSERT */
1571   // super call
1572   MacroAssembler::call_VM_base(oop_result, noreg, last_java_sp,
1573                                entry_point, number_of_arguments,
1574                                check_exceptions);
1575   // interpreter specific
1576   restore_bcp();
1577   restore_locals();
1578 }
1579 
1580 void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
1581                                                     address entry_point,
1582                                                     Register arg_1) {
1583   assert(arg_1 == c_rarg1, "");
1584   Label resume_pc, not_preempted;
1585 
1586   push_cont_fastpath();
1587 
1588   // Make VM call. In case of preemption set last_pc to
1589   // the one we want to resume to.
1590   la(t0, resume_pc);
1591   sd(t0, Address(xthread, JavaThread::last_Java_pc_offset()));
1592   call_VM_base(oop_result, noreg, noreg, entry_point, 1, false /*check_exceptions*/);
1593 
1594   pop_cont_fastpath();
1595 
1596   // Check if preempted
1597   ld(t0, Address(xthread, JavaThread::preempt_alternate_return_offset()));
1598   beqz(t0, not_preempted);
1599   sd(zr, Address(xthread, JavaThread::preempt_alternate_return_offset()));
1600   jr(t0);
1601 
1602   bind(resume_pc);
1603   restore_after_resume(false /* is_native */);
1604 
1605   bind(not_preempted);
1606 }
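
call_VM_preemptable is new in this change: it records a resume point before the call and, afterwards, consults preempt_alternate_return to decide where to continue. A minimal C++ model of that decision follows; JavaThreadModel and the helper function are stand-ins for illustration, not HotSpot types.

#include <cstdint>

// Stand-in for the single field the stub inspects after the VM call.
struct JavaThreadModel {
  uintptr_t preempt_alternate_return;  // non-zero => the thread was preempted
};

// Returns the pc execution should continue at: the alternate return address
// installed by the VM if the thread was preempted (clearing it, as the
// 'sd(zr, ...)' in the assembly does), or the normal fall-through pc.
static uintptr_t next_pc_after_vm_call(JavaThreadModel* thread,
                                       uintptr_t fallthrough_pc) {
  uintptr_t alt = thread->preempt_alternate_return;
  if (alt == 0) {
    return fallthrough_pc;               // beqz(t0, not_preempted)
  }
  thread->preempt_alternate_return = 0;  // consume the one-shot value
  return alt;                            // jr(t0)
}

The resume_pc recorded in last_Java_pc before the call is where execution re-enters once the continuation is resumed, which is why restore_after_resume immediately follows that label.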
1607 
1608 void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
1609   la(t0, ExternalAddress(Interpreter::cont_resume_interpreter_adapter()));
1610   jalr(t0);
1611   if (is_native) {
1612     // On resume we need to set up stack as expected
1613     push(dtos);
1614     push(ltos);
1615   }
1616 }
1617 
1618 void InterpreterMacroAssembler::profile_obj_type(Register obj, const Address& mdo_addr, Register tmp) {
1619   assert_different_registers(obj, tmp, t0, mdo_addr.base());
1620   Label update, next, none;
1621 
1622   verify_oop(obj);
1623 
1624   bnez(obj, update);
1625   orptr(mdo_addr, TypeEntries::null_seen, t0, tmp);
1626   j(next);
1627 
1628   bind(update);
1629   load_klass(obj, obj);
1630 
1631   ld(tmp, mdo_addr);
1632   xorr(obj, obj, tmp);
1633   andi(t0, obj, TypeEntries::type_klass_mask);
1634   beqz(t0, next); // klass seen before, nothing to
1635                   // do. The unknown bit may have been
1636                   // set already but no need to check.
1637 