17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "classfile/vmIntrinsics.hpp"
28 #include "compiler/oopMap.hpp"
29 #include "gc/shared/barrierSet.hpp"
30 #include "gc/shared/barrierSetAssembler.hpp"
31 #include "gc/shared/barrierSetNMethod.hpp"
32 #include "gc/shared/gc_globals.hpp"
33 #include "memory/universe.hpp"
34 #include "prims/jvmtiExport.hpp"
35 #include "prims/upcallLinker.hpp"
36 #include "runtime/arguments.hpp"
37 #include "runtime/javaThread.hpp"
38 #include "runtime/sharedRuntime.hpp"
39 #include "runtime/stubRoutines.hpp"
40 #include "stubGenerator_x86_64.hpp"
41 #ifdef COMPILER2
42 #include "opto/runtime.hpp"
43 #include "opto/c2_globals.hpp"
44 #endif
45 #if INCLUDE_JVMCI
46 #include "jvmci/jvmci_globals.hpp"
47 #endif
48 #if INCLUDE_JFR
49 #include "jfr/support/jfrIntrinsics.hpp"
50 #endif
51
52 // For a more detailed description of the stub routine structure
53 // see the comment in stubRoutines.hpp
54
55 #define __ _masm->
56 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
3761 __ ret(0);
3762 }
3763
3764 return start;
3765 }
3766
3767 address StubGenerator::generate_cont_thaw() {
3768 return generate_cont_thaw("Cont thaw", Continuation::thaw_top);
3769 }
3770
3771 // TODO: will probably need multiple return barriers depending on return type
3772
3773 address StubGenerator::generate_cont_returnBarrier() {
3774 return generate_cont_thaw("Cont thaw return barrier", Continuation::thaw_return_barrier);
3775 }
3776
3777 address StubGenerator::generate_cont_returnBarrier_exception() {
3778 return generate_cont_thaw("Cont thaw return barrier exception", Continuation::thaw_return_barrier_exception);
3779 }
3780
3781 #if INCLUDE_JFR
3782
3783 // For c2: c_rarg0 is junk, call to runtime to write a checkpoint.
3784 // It returns a jobject handle to the event writer.
3785 // The handle is dereferenced and the return value is the event writer oop.
3786 RuntimeStub* StubGenerator::generate_jfr_write_checkpoint() {
3787 enum layout {
3788 rbp_off,
3789 rbpH_off,
3790 return_off,
3791 return_off2,
3792 framesize // inclusive of return address
3793 };
3794
3795 CodeBuffer code("jfr_write_checkpoint", 1024, 64);
3796 MacroAssembler* _masm = new MacroAssembler(&code);
3797 address start = __ pc();
3798
3799 __ enter();
3800 address the_pc = __ pc();
4114 if (VM_Version::supports_float16()) {
4115 // For results consistency both intrinsics should be enabled.
4116 // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
4117 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
4118 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
4119 StubRoutines::_hf2f = generate_float16ToFloat();
4120 StubRoutines::_f2hf = generate_floatToFloat16();
4121 }
4122 }
4123
4124 generate_libm_stubs();
4125
4126 StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
4127 }
4128
// Generate the continuation (virtual-thread) support stubs and publish
// their entry points in StubRoutines.  Note: the generation order below is
// deliberate — keep it unless you know the stubs are order-independent.
void StubGenerator::generate_continuation_stubs() {
  // Continuation stubs:
  StubRoutines::_cont_thaw = generate_cont_thaw();                             // thaw topmost frames
  StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();           // lazy thaw on normal return
  StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception(); // lazy thaw on exceptional return

  // JFR stubs are only generated in builds with JFR included.
  JFR_ONLY(generate_jfr_stubs();)
}
4137
4138 #if INCLUDE_JFR
// Generate the JFR (Java Flight Recorder) runtime stubs.  For each stub both
// the full RuntimeStub object and its raw entry point are published in
// StubRoutines — the entry point is what generated code calls directly.
void StubGenerator::generate_jfr_stubs() {
  StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
  StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
  StubRoutines::_jfr_return_lease_stub = generate_jfr_return_lease();
  StubRoutines::_jfr_return_lease = StubRoutines::_jfr_return_lease_stub->entry_point();
}
4145 #endif
4146
4147 void StubGenerator::generate_final_stubs() {
4148 // Generates the rest of stubs and initializes the entry points
4149
4150 // These entry points require SharedInfo::stack0 to be set up in
4151 // non-core builds and need to be relocatable, so they each
4152 // fabricate a RuntimeStub internally.
4153 StubRoutines::_throw_AbstractMethodError_entry =
|
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "classfile/vmIntrinsics.hpp"
28 #include "compiler/oopMap.hpp"
29 #include "gc/shared/barrierSet.hpp"
30 #include "gc/shared/barrierSetAssembler.hpp"
31 #include "gc/shared/barrierSetNMethod.hpp"
32 #include "gc/shared/gc_globals.hpp"
33 #include "memory/universe.hpp"
34 #include "prims/jvmtiExport.hpp"
35 #include "prims/upcallLinker.hpp"
36 #include "runtime/arguments.hpp"
37 #include "runtime/continuationEntry.hpp"
38 #include "runtime/javaThread.hpp"
39 #include "runtime/sharedRuntime.hpp"
40 #include "runtime/stubRoutines.hpp"
41 #include "stubGenerator_x86_64.hpp"
42 #ifdef COMPILER2
43 #include "opto/runtime.hpp"
44 #include "opto/c2_globals.hpp"
45 #endif
46 #if INCLUDE_JVMCI
47 #include "jvmci/jvmci_globals.hpp"
48 #endif
49 #if INCLUDE_JFR
50 #include "jfr/support/jfrIntrinsics.hpp"
51 #endif
52
53 // For a more detailed description of the stub routine structure
54 // see the comment in stubRoutines.hpp
55
56 #define __ _masm->
57 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
3762 __ ret(0);
3763 }
3764
3765 return start;
3766 }
3767
3768 address StubGenerator::generate_cont_thaw() {
3769 return generate_cont_thaw("Cont thaw", Continuation::thaw_top);
3770 }
3771
3772 // TODO: will probably need multiple return barriers depending on return type
3773
3774 address StubGenerator::generate_cont_returnBarrier() {
3775 return generate_cont_thaw("Cont thaw return barrier", Continuation::thaw_return_barrier);
3776 }
3777
3778 address StubGenerator::generate_cont_returnBarrier_exception() {
3779 return generate_cont_thaw("Cont thaw return barrier exception", Continuation::thaw_return_barrier_exception);
3780 }
3781
// Stub run after the runtime has preempted (frozen) a continuation.
// It tears the current state down to the enterSpecial frame and either
// returns to Continuation.run() (preemption succeeded) or re-enters the
// thaw path (preemption was cancelled by the runtime).
address StubGenerator::generate_cont_preempt_stub() {
  if (!Continuations::enabled()) return nullptr;
  StubCodeMark mark(this, "StubRoutines","Continuation preempt stub");
  address start = __ pc();

#ifdef ASSERT
  // Sanity check: r15 must still hold the current JavaThread after the VM call.
  __ push(rax);
  { Label L;
    __ get_thread(rax);
    __ cmpptr(r15_thread, rax);
    __ jcc(Assembler::equal, L);
    __ stop("r15 should have been preserved across VM call");
    __ bind(L);
  }
  __ pop(rax);
#endif

  // Clear the last-Java-frame anchor (true => also clear the saved pc).
  __ reset_last_Java_frame(true);

  // reset _preempting flag
#ifdef ASSERT
  // We only get here on the preempt path, so the flag must currently be set.
  { Label L;
    __ movbool(rscratch1, Address(r15_thread, JavaThread::preempting_offset()));
    __ testbool(rscratch1);
    __ jcc(Assembler::notZero, L);
    __ stop("preempting flag should be set");
    __ bind(L);
  }
#endif
  __ movbool(Address(r15_thread, JavaThread::preempting_offset()), false);

  // Set rsp to enterSpecial frame
  __ movptr(rsp, Address(r15_thread, JavaThread::cont_entry_offset()));

  // The runtime may have cancelled the preemption after freezing started;
  // in that case we must resume the thaw instead of returning.
  Label preemption_cancelled;
  __ movbool(rscratch1, Address(r15_thread, JavaThread::preemption_cancelled_offset()));
  __ testbool(rscratch1);
  __ jcc(Assembler::notZero, preemption_cancelled);

  // Remove enterSpecial frame from the stack and return to Continuation.run()
  SharedRuntime::continuation_enter_cleanup(_masm);
  __ pop(rbp);
  __ ret(0);

  // Preemption cancelled: clear the flag, rebuild rbp to point just above the
  // ContinuationEntry, and jump back into the thaw call site.
  __ bind(preemption_cancelled);
  __ movbool(Address(r15_thread, JavaThread::preemption_cancelled_offset()), false);
  __ lea(rbp, Address(rsp, checked_cast<int32_t>(ContinuationEntry::size())));
  // _thaw_call_pc is a global holding the pc of the thaw call — jump there.
  __ movptr(rscratch1, ExternalAddress((address)&ContinuationEntry::_thaw_call_pc));
  __ jmp(rscratch1);

  return start;
}
3834
// Stub run when a continuation is resumed in order to (re)acquire an
// ObjectMonitor it was blocked on.  On success it returns to the caller
// (triggering the return barrier to thaw more frames); on failure — i.e.
// the thread was preempted again — it unwinds to the enterSpecial frame
// and returns to Continuation.run().
address StubGenerator::generate_cont_resume_monitor_operation() {
  if (!Continuations::enabled()) return nullptr;
  StubCodeMark mark(this, "StubRoutines","Continuation resume monitor operation");
  address start = __ pc();

#ifdef ASSERT
  // Sanity check: r15 must still hold the current JavaThread after the VM call.
  __ push(rax);
  { Label L;
    __ get_thread(rax);
    __ cmpptr(r15_thread, rax);
    __ jcc(Assembler::equal, L);
    __ stop("r15 should have been preserved across VM call");
    __ bind(L);
  }
  __ pop(rax);
#endif

  // Pop two words off the stack; the second holds the ObjectMonitor* to
  // operate on (becomes the 2nd C argument).  NOTE(review): the first popped
  // word is discarded — presumably a padding/return-address slot pushed by
  // the freeze/preempt path; confirm against the code that set up this stack.
  const Register waiter_reg = c_rarg1;
  __ pop(waiter_reg);
  __ pop(waiter_reg);

#ifdef ASSERT
  { Label L;
    __ testptr(waiter_reg, waiter_reg);
    __ jcc(Assembler::notEqual, L);
    __ stop("ObjectMonitor to use is null");
    __ bind(L);
  }
#endif // ASSERT

  // Call SharedRuntime::resume_monitor_operation(current_thread, monitor),
  // reserving the native ABI argument save area around the call.
  __ mov(c_rarg0, r15_thread);
  __ subptr(rsp, frame::arg_reg_save_area_bytes);
  __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::resume_monitor_operation)));
  __ addptr(rsp, frame::arg_reg_save_area_bytes);

  // If the runtime set _preempting again we failed to acquire the monitor.
  Label failAcquire;
  __ movbool(rscratch1, Address(r15_thread, JavaThread::preempting_offset()));
  __ testbool(rscratch1);
  __ jcc(Assembler::notEqual, failAcquire);
  // We have the lock now, just return to caller (we will actually hit the
  // return barrier to thaw more frames)
  __ pop(rbp);
  __ ret(0);

  __ bind(failAcquire);
  // Clear the flag and unwind back to Continuation.run().
  __ movbool(Address(r15_thread, JavaThread::preempting_offset()), false);
  // Set rsp to enterSpecial frame
  __ movptr(rsp, Address(r15_thread, JavaThread::cont_entry_offset()));
  // Remove enterSpecial frame from the stack and return to Continuation.run()
  SharedRuntime::continuation_enter_cleanup(_masm);
  __ pop(rbp);
  __ ret(0);

  return start;
}
3890
// Small adapter run when resuming into a compiled frame — presumably on the
// return path after a thaw; confirm against the users of
// StubRoutines::_cont_resume_compiler_adapter.  It zeroes rbx (see below),
// restores the caller's rbp and returns.
address StubGenerator::generate_cont_resume_compiler_adapter() {
  if (!Continuations::enabled()) return nullptr;
  StubCodeMark mark(this, "StubRoutines", "Continuation resume compiler adapter");
  address start = __ pc();

  // The safepoint blob handler expects that rbx, being a callee saved register, will be preserved
  // during the VM call. It is used to check if the return pc back to Java was modified in the runtime.
  // If it wasn't, the return pc is modified so on return the poll instruction is skipped. Saving this
  // additional value of rbx during freeze will complicate too much the code, so we just zero it here
  // so that the comparison fails and the skip is not attempted in case the pc was indeed changed.
  __ movptr(rbx, NULL_WORD);

  __ pop(rbp);
  __ ret(0);

  return start;
}
3908
3909 #if INCLUDE_JFR
3910
3911 // For c2: c_rarg0 is junk, call to runtime to write a checkpoint.
3912 // It returns a jobject handle to the event writer.
3913 // The handle is dereferenced and the return value is the event writer oop.
3914 RuntimeStub* StubGenerator::generate_jfr_write_checkpoint() {
3915 enum layout {
3916 rbp_off,
3917 rbpH_off,
3918 return_off,
3919 return_off2,
3920 framesize // inclusive of return address
3921 };
3922
3923 CodeBuffer code("jfr_write_checkpoint", 1024, 64);
3924 MacroAssembler* _masm = new MacroAssembler(&code);
3925 address start = __ pc();
3926
3927 __ enter();
3928 address the_pc = __ pc();
4242 if (VM_Version::supports_float16()) {
4243 // For results consistency both intrinsics should be enabled.
4244 // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
4245 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
4246 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
4247 StubRoutines::_hf2f = generate_float16ToFloat();
4248 StubRoutines::_f2hf = generate_floatToFloat16();
4249 }
4250 }
4251
4252 generate_libm_stubs();
4253
4254 StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
4255 }
4256
// Generate the continuation (virtual-thread) support stubs and publish
// their entry points in StubRoutines.  Note: the generation order below is
// deliberate — keep it unless you know the stubs are order-independent.
void StubGenerator::generate_continuation_stubs() {
  // Continuation stubs:
  StubRoutines::_cont_thaw = generate_cont_thaw();                             // thaw topmost frames
  StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();           // lazy thaw on normal return
  StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception(); // lazy thaw on exceptional return
  StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();             // run after a freeze/preempt VM call
  StubRoutines::_cont_resume_monitor_operation = generate_cont_resume_monitor_operation(); // re-acquire monitor on resume
  StubRoutines::_cont_resume_compiler_adapter = generate_cont_resume_compiler_adapter();   // resume into compiled frame

  // JFR stubs are only generated in builds with JFR included.
  JFR_ONLY(generate_jfr_stubs();)
}
4268
4269 #if INCLUDE_JFR
// Generate the JFR (Java Flight Recorder) runtime stubs.  For each stub both
// the full RuntimeStub object and its raw entry point are published in
// StubRoutines — the entry point is what generated code calls directly.
void StubGenerator::generate_jfr_stubs() {
  StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
  StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
  StubRoutines::_jfr_return_lease_stub = generate_jfr_return_lease();
  StubRoutines::_jfr_return_lease = StubRoutines::_jfr_return_lease_stub->entry_point();
}
4276 #endif
4277
4278 void StubGenerator::generate_final_stubs() {
4279 // Generates the rest of stubs and initializes the entry points
4280
4281 // These entry points require SharedInfo::stack0 to be set up in
4282 // non-core builds and need to be relocatable, so they each
4283 // fabricate a RuntimeStub internally.
4284 StubRoutines::_throw_AbstractMethodError_entry =
|