
src/hotspot/cpu/riscv/sharedRuntime_riscv.cpp


1683 
1684       // src -> dest if dest == x10 else x10 <- dest
1685       __ cmpxchg_obj_header(x10, lock_reg, obj_reg, t0, count, /*fallthrough*/nullptr);
1686 
1687       // Test if the oopMark is an obvious stack pointer, i.e.,
1688       //  1) (mark & 3) == 0, and
1689       //  2) sp <= mark < mark + os::pagesize()
1690       // These 3 tests can be done by evaluating the following
1691       // expression: ((mark - sp) & (3 - os::vm_page_size())),
1692       // assuming both stack pointer and pagesize have their
1693       // least significant 2 bits clear.
1694       // NOTE: the oopMark is in swap_reg x10 as the result of cmpxchg
1695 
1696       __ sub(swap_reg, swap_reg, sp);
1697       __ andi(swap_reg, swap_reg, 3 - (int)os::vm_page_size());
1698 
1699       // Save the test result, for recursive case, the result is zero
1700       __ sd(swap_reg, Address(lock_reg, mark_word_offset));
1701       __ bnez(swap_reg, slow_path_lock);
1702     } else {
1703       assert(LockingMode == LM_LIGHTWEIGHT, "");
1704       __ ld(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
1705       __ lightweight_lock(obj_reg, swap_reg, tmp, lock_tmp, slow_path_lock);
1706     }
1707 
1708     __ bind(count);
1709     __ increment(Address(xthread, JavaThread::held_monitor_count_offset()));
1710 
1711     // Slow path will re-enter here
1712     __ bind(lock_done);
1713   }
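
The LM_LEGACY fast path above folds the alignment test and the same-page range test from the comment into one mask-and-branch. A minimal standalone C++ sketch (illustrative names and a 4 KiB page size, not HotSpot code) of why that expression is zero exactly when the mark looks like a lock on the current stack page:

    #include <cstdint>
    #include <cstdio>

    // (mark - sp) & (3 - page_size) == 0  <=>  mark is 4-byte aligned and
    // sp <= mark < sp + page_size, provided sp and page_size have their low
    // two bits clear: 3 - page_size sets the low two bits and every bit at
    // or above the page-size bit.
    static bool looks_like_recursive_stack_lock(uintptr_t mark, uintptr_t sp,
                                                uintptr_t page_size) {
      return ((mark - sp) & (3 - page_size)) == 0;
    }

    int main() {
      const uintptr_t sp = 0x7ffffffff000;  // pretend stack pointer, aligned
      const uintptr_t page = 4096;
      printf("%d\n", looks_like_recursive_stack_lock(sp + 64,   sp, page)); // 1: on this page
      printf("%d\n", looks_like_recursive_stack_lock(sp + 8192, sp, page)); // 0: off the page
      printf("%d\n", looks_like_recursive_stack_lock(sp + 66,   sp, page)); // 0: misaligned
      return 0;
    }
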
1714 
1715 
1716   // Finally just about ready to make the JNI call
1717 
1718   // get JNIEnv* which is first argument to native
1719   __ la(c_rarg0, Address(xthread, in_bytes(JavaThread::jni_environment_offset())));
1720 
1721   // Now set thread in native
1722   __ la(t1, Address(xthread, JavaThread::thread_state_offset()));
1723   __ mv(t0, _thread_in_native);
1724   __ membar(MacroAssembler::LoadStore | MacroAssembler::StoreStore);

1810     // Must save x10 if it is live now because cmpxchg must use it
1811     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1812       save_native_result(masm, ret_type, stack_slots);
1813     }
1814 
1815     if (LockingMode == LM_MONITOR) {
1816       __ j(slow_path_unlock);
1817     } else if (LockingMode == LM_LEGACY) {
1818       // get address of the stack lock
1819       __ la(x10, Address(sp, lock_slot_offset * VMRegImpl::stack_slot_size));
1820       //  get old displaced header
1821       __ ld(old_hdr, Address(x10, 0));
1822 
1823       // Atomic swap old header if oop still contains the stack lock
1824       Label count;
1825       __ cmpxchg_obj_header(x10, old_hdr, obj_reg, t0, count, &slow_path_unlock);
1826       __ bind(count);
1827       __ decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
1828     } else {
1829       assert(LockingMode == LM_LIGHTWEIGHT, "");
1830       __ ld(old_hdr, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
1831       __ test_bit(t0, old_hdr, exact_log2(markWord::monitor_value));
1832       __ bnez(t0, slow_path_unlock);
1833       __ lightweight_unlock(obj_reg, old_hdr, swap_reg, lock_tmp, slow_path_unlock);
1834       __ decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
1835     }
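
In the LM_LIGHTWEIGHT branch just above, the mark word is reloaded and its monitor bit tested before lightweight_unlock, so the fast unlock is only attempted while the lock is still in the fast-locked state; an inflated lock falls back to slow_path_unlock. A standalone sketch of that guard (the 0b10 tag follows markWord::monitor_value; everything else is illustrative):

    #include <cstdint>
    #include <cstdio>

    // The low two bits of the mark word act as a lock tag; 0b10 marks an
    // object whose lock has been inflated to an ObjectMonitor.
    static const uintptr_t monitor_value = 0b10;

    static bool has_monitor(uintptr_t mark) {
      // Same test as: test_bit(t0, old_hdr, exact_log2(markWord::monitor_value))
      return (mark & monitor_value) != 0;
    }

    int main() {
      printf("%d\n", has_monitor(0x00007f00dead0000));  // 0: fast-locked, fast unlock ok
      printf("%d\n", has_monitor(0x00007f00dead0002));  // 1: inflated, take slow_path_unlock
      return 0;
    }
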
1836 
1837     // slow path re-enters here
1838     __ bind(unlock_done);
1839     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1840       restore_native_result(masm, ret_type, stack_slots);
1841     }
1842 
1843     __ bind(done);
1844   }
1845 
1846   Label dtrace_method_exit, dtrace_method_exit_done;
1847   {
1848     ExternalAddress target((address)&DTraceMethodProbes);
1849     __ relocate(target.rspec(), [&] {
1850       int32_t offset;
1851       __ la_patchable(t0, target, offset);
1852       __ lbu(t0, Address(t0, offset));

1683 
1684       // src -> dest if dest == x10 else x10 <- dest
1685       __ cmpxchg_obj_header(x10, lock_reg, obj_reg, t0, count, /*fallthrough*/nullptr);
1686 
1687       // Test if the oopMark is an obvious stack pointer, i.e.,
1688       //  1) (mark & 3) == 0, and
1689       //  2) sp <= mark < mark + os::pagesize()
1690       // These 3 tests can be done by evaluating the following
1691       // expression: ((mark - sp) & (3 - os::vm_page_size())),
1692       // assuming both stack pointer and pagesize have their
1693       // least significant 2 bits clear.
1694       // NOTE: the oopMark is in swap_reg x10 as the result of cmpxchg
1695 
1696       __ sub(swap_reg, swap_reg, sp);
1697       __ andi(swap_reg, swap_reg, 3 - (int)os::vm_page_size());
1698 
1699       // Save the test result, for recursive case, the result is zero
1700       __ sd(swap_reg, Address(lock_reg, mark_word_offset));
1701       __ bnez(swap_reg, slow_path_lock);
1702     } else {
1703       assert(LockingMode == LM_LIGHTWEIGHT, "must be");

1704       __ lightweight_lock(obj_reg, swap_reg, tmp, lock_tmp, slow_path_lock);
1705     }
1706 
1707     __ bind(count);
1708     __ increment(Address(xthread, JavaThread::held_monitor_count_offset()));
1709 
1710     // Slow path will re-enter here
1711     __ bind(lock_done);
1712   }
1713 
1714 
1715   // Finally just about ready to make the JNI call
1716 
1717   // get JNIEnv* which is first argument to native
1718   __ la(c_rarg0, Address(xthread, in_bytes(JavaThread::jni_environment_offset())));
1719 
1720   // Now set thread in native
1721   __ la(t1, Address(xthread, JavaThread::thread_state_offset()));
1722   __ mv(t0, _thread_in_native);
1723   __ membar(MacroAssembler::LoadStore | MacroAssembler::StoreStore);

1809     // Must save x10 if it is live now because cmpxchg must use it
1810     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1811       save_native_result(masm, ret_type, stack_slots);
1812     }
1813 
1814     if (LockingMode == LM_MONITOR) {
1815       __ j(slow_path_unlock);
1816     } else if (LockingMode == LM_LEGACY) {
1817       // get address of the stack lock
1818       __ la(x10, Address(sp, lock_slot_offset * VMRegImpl::stack_slot_size));
1819       //  get old displaced header
1820       __ ld(old_hdr, Address(x10, 0));
1821 
1822       // Atomic swap old header if oop still contains the stack lock
1823       Label count;
1824       __ cmpxchg_obj_header(x10, old_hdr, obj_reg, t0, count, &slow_path_unlock);
1825       __ bind(count);
1826       __ decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
1827     } else {
1828       assert(LockingMode == LM_LIGHTWEIGHT, "");



1829       __ lightweight_unlock(obj_reg, old_hdr, swap_reg, lock_tmp, slow_path_unlock);
1830       __ decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
1831     }
1832 
1833     // slow path re-enters here
1834     __ bind(unlock_done);
1835     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1836       restore_native_result(masm, ret_type, stack_slots);
1837     }
1838 
1839     __ bind(done);
1840   }
1841 
1842   Label dtrace_method_exit, dtrace_method_exit_done;
1843   {
1844     ExternalAddress target((address)&DTraceMethodProbes);
1845     __ relocate(target.rspec(), [&] {
1846       int32_t offset;
1847       __ la_patchable(t0, target, offset);
1848       __ lbu(t0, Address(t0, offset));