
src/hotspot/cpu/riscv/sharedRuntime_riscv.cpp


1687 
1688       // src -> dest if dest == x10 else x10 <- dest
1689       __ cmpxchg_obj_header(x10, lock_reg, obj_reg, t0, count, /*fallthrough*/nullptr);
1690 
1691       // Test if the oopMark is an obvious stack pointer, i.e.,
1692       //  1) (mark & 3) == 0, and
1693       //  2) sp <= mark < sp + os::vm_page_size()
1694       // These 3 tests can be done by evaluating the following
1695       // expression: ((mark - sp) & (3 - os::vm_page_size())),
1696       // assuming both stack pointer and pagesize have their
1697       // least significant 2 bits clear.
1698       // NOTE: the oopMark is in swap_reg (x10) as the result of cmpxchg
1699 
1700       __ sub(swap_reg, swap_reg, sp);
1701       __ andi(swap_reg, swap_reg, 3 - (int)os::vm_page_size());
1702 
1703       // Save the test result, for recursive case, the result is zero
1704       __ sd(swap_reg, Address(lock_reg, mark_word_offset));
1705       __ bnez(swap_reg, slow_path_lock);
1706     } else {
1707       assert(LockingMode == LM_LIGHTWEIGHT, "");
1708       __ ld(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
1709       __ lightweight_lock(obj_reg, swap_reg, tmp, lock_tmp, slow_path_lock);
1710     }
1711 
1712     __ bind(count);
1713     __ increment(Address(xthread, JavaThread::held_monitor_count_offset()));
1714 
1715     // Slow path will re-enter here
1716     __ bind(lock_done);
1717   }
1718 
1719 
1720   // Finally just about ready to make the JNI call
1721 
1722   // get JNIEnv* which is first argument to native
1723   __ la(c_rarg0, Address(xthread, in_bytes(JavaThread::jni_environment_offset())));
1724 
1725   // Now set thread in native
1726   __ la(t1, Address(xthread, JavaThread::thread_state_offset()));
1727   __ mv(t0, _thread_in_native);
1728   __ membar(MacroAssembler::LoadStore | MacroAssembler::StoreStore);
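
For the LM_LEGACY path above, the comment at lines 1691-1698 compresses the alignment and range checks into a single mask. A minimal standalone sketch of that arithmetic, assuming a 4 KiB page and made-up 64-bit addresses (plain C++, not HotSpot code):

#include <cstdint>
#include <cstdio>

int main() {
  const uint64_t page_size = 4096;        // stand-in for os::vm_page_size()
  const uint64_t sp        = 0x7ffff000;  // hypothetical stack pointer, low 2 bits clear
  const uint64_t mark      = sp + 0x40;   // displaced header saved in an older frame of this stack

  // Zero iff (mark & 3) == 0 and sp <= mark < sp + page_size, i.e. a recursive stack lock.
  const uint64_t test = (mark - sp) & (3 - page_size);
  std::printf("recursive stack lock: %s\n", test == 0 ? "yes" : "no");
  return 0;
}

A zero result means the displaced header lies in the current thread's stack within one page of sp, i.e. the lock is recursive, and that zero is exactly what gets stored back at mark_word_offset by the sd above.
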

1814     // Must save x10 if it is live now because cmpxchg must use it
1815     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1816       save_native_result(masm, ret_type, stack_slots);
1817     }
1818 
1819     if (LockingMode == LM_MONITOR) {
1820       __ j(slow_path_unlock);
1821     } else if (LockingMode == LM_LEGACY) {
1822       // get address of the stack lock
1823       __ la(x10, Address(sp, lock_slot_offset * VMRegImpl::stack_slot_size));
1824       //  get old displaced header
1825       __ ld(old_hdr, Address(x10, 0));
1826 
1827       // Atomic swap old header if oop still contains the stack lock
1828       Label count;
1829       __ cmpxchg_obj_header(x10, old_hdr, obj_reg, t0, count, &slow_path_unlock);
1830       __ bind(count);
1831       __ decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
1832     } else {
1833       assert(LockingMode == LM_LIGHTWEIGHT, "");
1834       __ ld(old_hdr, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
1835       __ test_bit(t0, old_hdr, exact_log2(markWord::monitor_value));
1836       __ bnez(t0, slow_path_unlock);
1837       __ lightweight_unlock(obj_reg, old_hdr, swap_reg, lock_tmp, slow_path_unlock);
1838       __ decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
1839     }
1840 
1841     // slow path re-enters here
1842     __ bind(unlock_done);
1843     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1844       restore_native_result(masm, ret_type, stack_slots);
1845     }
1846 
1847     __ bind(done);
1848   }
1849 
1850   Label dtrace_method_exit, dtrace_method_exit_done;
1851   {
1852     ExternalAddress target((address)&DTraceMethodProbes);
1853     __ relocate(target.rspec(), [&] {
1854       int32_t offset;
1855       __ la_patchable(t0, target, offset);
1856       __ lbu(t0, Address(t0, offset));
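
In the LM_LIGHTWEIGHT unlock path above, the test_bit/bnez pair bails out to slow_path_unlock when the mark word already carries a monitor (inflated lock) tag. A rough sketch of what that bit test computes, assuming the usual markWord tag encoding where monitor_value is 2 (illustrative names, not HotSpot code):

#include <cstdint>
#include <cstdio>

// Mirrors: __ test_bit(t0, old_hdr, exact_log2(markWord::monitor_value));
//          __ bnez(t0, slow_path_unlock);
static bool is_inflated(uint64_t mark) {
  const uint64_t monitor_value = 2;          // assumed markWord::monitor_value (tag bits 10)
  return (mark & monitor_value) != 0;        // same as testing bit exact_log2(2) == 1
}

int main() {
  std::printf("%d\n", is_inflated(0x1));     // unlocked mark (tag 01): 0, stay on fast path
  std::printf("%d\n", is_inflated(0x2));     // inflated monitor (tag 10): 1, take slow_path_unlock
  return 0;
}

In the updated version of this code further down, the ld/test_bit/bnez prologue is gone and only the lightweight_unlock call remains.
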

1687 
1688       // src -> dest if dest == x10 else x10 <- dest
1689       __ cmpxchg_obj_header(x10, lock_reg, obj_reg, t0, count, /*fallthrough*/nullptr);
1690 
1691       // Test if the oopMark is an obvious stack pointer, i.e.,
1692       //  1) (mark & 3) == 0, and
1693       //  2) sp <= mark < sp + os::vm_page_size()
1694       // These 3 tests can be done by evaluating the following
1695       // expression: ((mark - sp) & (3 - os::vm_page_size())),
1696       // assuming both stack pointer and pagesize have their
1697       // least significant 2 bits clear.
1698       // NOTE: the oopMark is in swap_reg (x10) as the result of cmpxchg
1699 
1700       __ sub(swap_reg, swap_reg, sp);
1701       __ andi(swap_reg, swap_reg, 3 - (int)os::vm_page_size());
1702 
1703       // Save the test result, for recursive case, the result is zero
1704       __ sd(swap_reg, Address(lock_reg, mark_word_offset));
1705       __ bnez(swap_reg, slow_path_lock);
1706     } else {
1707       assert(LockingMode == LM_LIGHTWEIGHT, "must be");

1708       __ lightweight_lock(obj_reg, swap_reg, tmp, lock_tmp, slow_path_lock);
1709     }
1710 
1711     __ bind(count);
1712     __ increment(Address(xthread, JavaThread::held_monitor_count_offset()));
1713 
1714     // Slow path will re-enter here
1715     __ bind(lock_done);
1716   }
1717 
1718 
1719   // Finally just about ready to make the JNI call
1720 
1721   // get JNIEnv* which is first argument to native
1722   __ la(c_rarg0, Address(xthread, in_bytes(JavaThread::jni_environment_offset())));
1723 
1724   // Now set thread in native
1725   __ la(t1, Address(xthread, JavaThread::thread_state_offset()));
1726   __ mv(t0, _thread_in_native);
1727   __ membar(MacroAssembler::LoadStore | MacroAssembler::StoreStore);
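
The LoadStore|StoreStore barrier just above orders all preceding memory accesses before the subsequent store of _thread_in_native (the store itself follows outside this excerpt). In C++ terms this is the release-store pattern; a hedged analogue with illustrative names, not HotSpot code:

#include <atomic>

std::atomic<int> thread_state{0};  // stand-in for JavaThread::_thread_state

void transition_to_native(int thread_in_native) {
  // membar(LoadStore | StoreStore) followed by a plain store behaves like a
  // release store: no earlier load or store may be reordered past the state change.
  thread_state.store(thread_in_native, std::memory_order_release);
}
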

1813     // Must save x10 if it is live now because cmpxchg must use it
1814     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1815       save_native_result(masm, ret_type, stack_slots);
1816     }
1817 
1818     if (LockingMode == LM_MONITOR) {
1819       __ j(slow_path_unlock);
1820     } else if (LockingMode == LM_LEGACY) {
1821       // get address of the stack lock
1822       __ la(x10, Address(sp, lock_slot_offset * VMRegImpl::stack_slot_size));
1823       //  get old displaced header
1824       __ ld(old_hdr, Address(x10, 0));
1825 
1826       // Atomic swap old header if oop still contains the stack lock
1827       Label count;
1828       __ cmpxchg_obj_header(x10, old_hdr, obj_reg, t0, count, &slow_path_unlock);
1829       __ bind(count);
1830       __ decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
1831     } else {
1832       assert(LockingMode == LM_LIGHTWEIGHT, "");



1833       __ lightweight_unlock(obj_reg, old_hdr, swap_reg, lock_tmp, slow_path_unlock);
1834       __ decrement(Address(xthread, JavaThread::held_monitor_count_offset()));
1835     }
1836 
1837     // slow path re-enters here
1838     __ bind(unlock_done);
1839     if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
1840       restore_native_result(masm, ret_type, stack_slots);
1841     }
1842 
1843     __ bind(done);
1844   }
1845 
1846   Label dtrace_method_exit, dtrace_method_exit_done;
1847   {
1848     ExternalAddress target((address)&DTraceMethodProbes);
1849     __ relocate(target.rspec(), [&] {
1850       int32_t offset;
1851       __ la_patchable(t0, target, offset);
1852       __ lbu(t0, Address(t0, offset));
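
For the LM_LEGACY unlock fast path above ("Atomic swap old header if oop still contains the stack lock"), the cmpxchg_obj_header call reinstalls the displaced header only if the object's mark still points at our stack lock; otherwise it branches to slow_path_unlock. A compare-exchange sketch of that pattern, with illustrative types and names rather than HotSpot's:

#include <atomic>
#include <cstdint>

// Returns true if the fast unlock succeeded (the mark still held our stack lock),
// false if the lock was inflated or contended meanwhile (take slow_path_unlock).
bool legacy_unlock_fast_path(std::atomic<uint64_t>& obj_mark,
                             uint64_t stack_lock_addr,    // address of the BasicLock on our stack (x10)
                             uint64_t displaced_header) { // old header saved in that BasicLock (old_hdr)
  uint64_t expected = stack_lock_addr;
  return obj_mark.compare_exchange_strong(expected, displaced_header);
}
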