< prev index next >

src/hotspot/cpu/x86/stubGenerator_x86_64.cpp

Print this page

  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "classfile/vmIntrinsics.hpp"
  28 #include "compiler/oopMap.hpp"
  29 #include "gc/shared/barrierSet.hpp"
  30 #include "gc/shared/barrierSetAssembler.hpp"
  31 #include "gc/shared/barrierSetNMethod.hpp"
  32 #include "gc/shared/gc_globals.hpp"
  33 #include "memory/universe.hpp"
  34 #include "prims/jvmtiExport.hpp"
  35 #include "prims/upcallLinker.hpp"
  36 #include "runtime/arguments.hpp"

  37 #include "runtime/javaThread.hpp"
  38 #include "runtime/sharedRuntime.hpp"
  39 #include "runtime/stubRoutines.hpp"
  40 #include "stubGenerator_x86_64.hpp"
  41 #ifdef COMPILER2
  42 #include "opto/runtime.hpp"
  43 #include "opto/c2_globals.hpp"
  44 #endif
  45 #if INCLUDE_JVMCI
  46 #include "jvmci/jvmci_globals.hpp"
  47 #endif
  48 #if INCLUDE_JFR
  49 #include "jfr/support/jfrIntrinsics.hpp"
  50 #endif
  51 
  52 // For a more detailed description of the stub routine structure
  53 // see the comment in stubRoutines.hpp
  54 
  55 #define __ _masm->
  56 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)

3767     __ ret(0);
3768   }
3769 
3770   return start;
3771 }
3772 
// Generates the stub that thaws the top-most frames of a continuation
// (Continuation::thaw_top); delegates to the common thaw generator.
address StubGenerator::generate_cont_thaw() {
  return generate_cont_thaw("Cont thaw", Continuation::thaw_top);
}
3776 
3777 // TODO: will probably need multiple return barriers depending on return type
3778 
// Generates the return-barrier stub, entered when returning into a frame
// whose caller frames are still frozen (Continuation::thaw_return_barrier).
address StubGenerator::generate_cont_returnBarrier() {
  return generate_cont_thaw("Cont thaw return barrier", Continuation::thaw_return_barrier);
}
3782 
// Exception variant of the return barrier: same as generate_cont_returnBarrier
// but for unwinding with a pending exception (thaw_return_barrier_exception).
address StubGenerator::generate_cont_returnBarrier_exception() {
  return generate_cont_thaw("Cont thaw return barrier exception", Continuation::thaw_return_barrier_exception);
}
3786 






























































































































3787 #if INCLUDE_JFR
3788 
3789 // For c2: c_rarg0 is junk, call to runtime to write a checkpoint.
3790 // It returns a jobject handle to the event writer.
3791 // The handle is dereferenced and the return value is the event writer oop.
3792 RuntimeStub* StubGenerator::generate_jfr_write_checkpoint() {
3793   enum layout {
3794     rbp_off,
3795     rbpH_off,
3796     return_off,
3797     return_off2,
3798     framesize // inclusive of return address
3799   };
3800 
3801   CodeBuffer code("jfr_write_checkpoint", 1024, 64);
3802   MacroAssembler* _masm = new MacroAssembler(&code);
3803   address start = __ pc();
3804 
3805   __ enter();
3806   address the_pc = __ pc();

4072   if (VM_Version::supports_float16()) {
4073     // For results consistency both intrinsics should be enabled.
4074     // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
4075     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
4076         vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
4077       StubRoutines::_hf2f = generate_float16ToFloat();
4078       StubRoutines::_f2hf = generate_floatToFloat16();
4079     }
4080   }
4081 
4082   generate_libm_stubs();
4083 
4084   StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
4085 }
4086 
// Generates the continuation (Loom) support stubs and publishes their
// entry points through StubRoutines.
void StubGenerator::generate_continuation_stubs() {
  // Continuation stubs:
  StubRoutines::_cont_thaw          = generate_cont_thaw();
  StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
  StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();

  // JFR stubs are only generated in builds that include JFR.
  JFR_ONLY(generate_jfr_stubs();)
}
4095 
4096 #if INCLUDE_JFR
// Generates the JFR support stubs, caching both the RuntimeStub* (owner of
// the code blob) and its raw entry point for each stub.
void StubGenerator::generate_jfr_stubs() {
  StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
  StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
  StubRoutines::_jfr_return_lease_stub = generate_jfr_return_lease();
  StubRoutines::_jfr_return_lease = StubRoutines::_jfr_return_lease_stub->entry_point();
}
4103 #endif
4104 
4105 void StubGenerator::generate_final_stubs() {
4106   // Generates the rest of stubs and initializes the entry points
4107 
4108   // These entry points require SharedInfo::stack0 to be set up in
4109   // non-core builds and need to be relocatable, so they each
4110   // fabricate a RuntimeStub internally.
4111   StubRoutines::_throw_AbstractMethodError_entry =

  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "classfile/vmIntrinsics.hpp"
  28 #include "compiler/oopMap.hpp"
  29 #include "gc/shared/barrierSet.hpp"
  30 #include "gc/shared/barrierSetAssembler.hpp"
  31 #include "gc/shared/barrierSetNMethod.hpp"
  32 #include "gc/shared/gc_globals.hpp"
  33 #include "memory/universe.hpp"
  34 #include "prims/jvmtiExport.hpp"
  35 #include "prims/upcallLinker.hpp"
  36 #include "runtime/arguments.hpp"
  37 #include "runtime/continuationEntry.hpp"
  38 #include "runtime/javaThread.hpp"
  39 #include "runtime/sharedRuntime.hpp"
  40 #include "runtime/stubRoutines.hpp"
  41 #include "stubGenerator_x86_64.hpp"
  42 #ifdef COMPILER2
  43 #include "opto/runtime.hpp"
  44 #include "opto/c2_globals.hpp"
  45 #endif
  46 #if INCLUDE_JVMCI
  47 #include "jvmci/jvmci_globals.hpp"
  48 #endif
  49 #if INCLUDE_JFR
  50 #include "jfr/support/jfrIntrinsics.hpp"
  51 #endif
  52 
  53 // For a more detailed description of the stub routine structure
  54 // see the comment in stubRoutines.hpp
  55 
  56 #define __ _masm->
  57 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)

3768     __ ret(0);
3769   }
3770 
3771   return start;
3772 }
3773 
3774 address StubGenerator::generate_cont_thaw() {
3775   return generate_cont_thaw("Cont thaw", Continuation::thaw_top);
3776 }
3777 
3778 // TODO: will probably need multiple return barriers depending on return type
3779 
3780 address StubGenerator::generate_cont_returnBarrier() {
3781   return generate_cont_thaw("Cont thaw return barrier", Continuation::thaw_return_barrier);
3782 }
3783 
3784 address StubGenerator::generate_cont_returnBarrier_exception() {
3785   return generate_cont_thaw("Cont thaw return barrier exception", Continuation::thaw_return_barrier_exception);
3786 }
3787 
// Entered after a VM call during which the continuation was preempted
// (frozen). Clears the _preempting flag, resets rsp to the enterSpecial
// frame, and either returns to Continuation.run() or — if the preemption
// was cancelled — jumps back to the recorded thaw call pc to resume.
address StubGenerator::generate_cont_preempt_stub() {
  if (!Continuations::enabled()) return nullptr;
  StubCodeMark mark(this, "StubRoutines","Continuation preempt stub");
  address start = __ pc();

#ifdef ASSERT
  // Sanity: r15 must still hold the current JavaThread after the VM call.
  __ push(rax);
  { Label L;
    __ get_thread(rax);
    __ cmpptr(r15_thread, rax);
    __ jcc(Assembler::equal, L);
    __ stop("r15 should have been preserved across VM call");
    __ bind(L);
  }
  __ pop(rax);
#endif

  __ reset_last_Java_frame(true);

  // reset _preempting flag
#ifdef ASSERT
  // The flag must be set — otherwise this stub should not have been entered.
  { Label L;
    __ movbool(rscratch1, Address(r15_thread, JavaThread::preempting_offset()));
    __ testbool(rscratch1);
    __ jcc(Assembler::notZero, L);
    __ stop("preempting flag should be set");
    __ bind(L);
  }
#endif
  __ movbool(Address(r15_thread, JavaThread::preempting_offset()), false);

  // Set rsp to enterSpecial frame
  __ movptr(rsp, Address(r15_thread, JavaThread::cont_entry_offset()));

  // If the runtime cancelled the preemption, resume the continuation instead
  // of unwinding.
  Label preemption_cancelled;
  __ movbool(rscratch1, Address(r15_thread, JavaThread::preemption_cancelled_offset()));
  __ testbool(rscratch1);
  __ jcc(Assembler::notZero, preemption_cancelled);

  // Remove enterSpecial frame from the stack and return to Continuation.run()
  SharedRuntime::continuation_enter_cleanup(_masm);
  __ pop(rbp);
  __ ret(0);

  __ bind(preemption_cancelled);
  // Rebuild rbp to point just past the ContinuationEntry, then jump to the
  // thaw call pc recorded in ContinuationEntry::_thaw_call_pc.
  __ lea(rbp, Address(rsp, checked_cast<int32_t>(ContinuationEntry::size())));
  __ movptr(rscratch1, ExternalAddress((address)&ContinuationEntry::_thaw_call_pc));
  __ jmp(rscratch1);

  return start;
}
3839 
// Retries a monitor acquisition that caused the continuation to be preempted.
// Calls SharedRuntime::redo_monitorenter(thread, monitor) with the
// ObjectMonitor* taken from the stack; on success returns to the caller
// (which will hit the thaw return barrier), on renewed preemption unwinds
// back to Continuation.run().
address StubGenerator::generate_cont_preempt_monitorenter_redo() {
  if (!Continuations::enabled()) return nullptr;
  StubCodeMark mark(this, "StubRoutines","Continuation monitorenter redo stub");
  address start = __ pc();

#ifdef ASSERT
  // Sanity: r15 must still hold the current JavaThread after the VM call.
  __ push(rax);
  { Label L;
    __ get_thread(rax);
    __ cmpptr(r15_thread, rax);
    __ jcc(Assembler::equal, L);
    __ stop("r15 should have been preserved across VM call");
    __ bind(L);
  }
  __ pop(rax);
#endif

  const Register mon_reg = c_rarg1;
  // Two pops: the first appears to discard the slot above the ObjectMonitor*
  // on the stack (presumably padding/a saved word pushed at freeze time —
  // TODO confirm against the freeze code); the second loads the ObjectMonitor*.
  __ pop(mon_reg);
  __ pop(mon_reg);

#ifdef ASSERT
  { Label L;
    __ testptr(mon_reg, mon_reg);
    __ jcc(Assembler::notEqual, L);
    __ stop("ObjectMonitor to use is null");
    __ bind(L);
  }
#endif // ASSERT

  // redo_monitorenter(current_thread, monitor); rsp is adjusted around the
  // call by arg_reg_save_area_bytes (NOTE(review): presumably the native ABI
  // argument shadow area — nonzero on Windows — confirm per platform).
  __ mov(c_rarg0, r15_thread);
  __ subptr(rsp, frame::arg_reg_save_area_bytes);
  __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::redo_monitorenter)));
  __ addptr(rsp, frame::arg_reg_save_area_bytes);

  // If _preempting was set again, the lock was not acquired and we were
  // preempted once more; otherwise we own the monitor.
  Label failAcquire;
  __ movbool(rscratch1, Address(r15_thread, JavaThread::preempting_offset()));
  __ testbool(rscratch1);
  __ jcc(Assembler::notEqual, failAcquire);
  // We have the lock now, just return to caller (we will actually hit the
  // return barrier to thaw more frames)
  __ pop(rbp);
  __ ret(0);

  __ bind(failAcquire);
  __ movbool(Address(r15_thread, JavaThread::preempting_offset()), false);
  // Set rsp to enterSpecial frame
  __ movptr(rsp, Address(r15_thread, JavaThread::cont_entry_offset()));
  // Remove enterSpecial frame from the stack and return to Continuation.run()
  SharedRuntime::continuation_enter_cleanup(_masm);
  __ pop(rbp);
  __ ret(0);

  return start;
}
3895 
// Tiny adapter run before re-entering a compiled safepoint blob after a
// preemption: zeroes rbx so the blob's return-pc comparison fails (see
// comment below), then pops rbp and returns.
address StubGenerator::generate_cont_preempt_rerun_compiler_adapter() {
  if (!Continuations::enabled()) return nullptr;
  StubCodeMark mark(this, "StubRoutines", "Continuation preempt safepoint blob adapter");
  address start = __ pc();

  // The safepoint blob handler expects that rbx, being a callee saved register, will be preserved
  // during the VM call. It is used to check if the return pc back to Java was modified in the runtime.
  // If it wasn't, the return pc is modified so on return the poll instruction is skipped. Saving this
  // additional value of rbx during freeze will complicate too much the code, so we just zero it here
  // so that the comparison fails and the skip is not attempted in case the pc was indeed changed.
  __ movptr(rbx, NULL_WORD);

  __ pop(rbp);
  __ ret(0);

  return start;
}
3913 
3914 #if INCLUDE_JFR
3915 
3916 // For c2: c_rarg0 is junk, call to runtime to write a checkpoint.
3917 // It returns a jobject handle to the event writer.
3918 // The handle is dereferenced and the return value is the event writer oop.
3919 RuntimeStub* StubGenerator::generate_jfr_write_checkpoint() {
3920   enum layout {
3921     rbp_off,
3922     rbpH_off,
3923     return_off,
3924     return_off2,
3925     framesize // inclusive of return address
3926   };
3927 
3928   CodeBuffer code("jfr_write_checkpoint", 1024, 64);
3929   MacroAssembler* _masm = new MacroAssembler(&code);
3930   address start = __ pc();
3931 
3932   __ enter();
3933   address the_pc = __ pc();

4199   if (VM_Version::supports_float16()) {
4200     // For results consistency both intrinsics should be enabled.
4201     // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
4202     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
4203         vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
4204       StubRoutines::_hf2f = generate_float16ToFloat();
4205       StubRoutines::_f2hf = generate_floatToFloat16();
4206     }
4207   }
4208 
4209   generate_libm_stubs();
4210 
4211   StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
4212 }
4213 
// Generates the continuation (Loom) support stubs — thaw, return barriers,
// and the preemption stubs — and publishes their entry points through
// StubRoutines.
void StubGenerator::generate_continuation_stubs() {
  // Continuation stubs:
  StubRoutines::_cont_thaw          = generate_cont_thaw();
  StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
  StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
  // Preemption stubs return nullptr when Continuations are disabled.
  StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();
  StubRoutines::_cont_preempt_monitorenter_redo = generate_cont_preempt_monitorenter_redo();
  StubRoutines::_cont_preempt_rerun_compiler_adapter = generate_cont_preempt_rerun_compiler_adapter();

  // JFR stubs are only generated in builds that include JFR.
  JFR_ONLY(generate_jfr_stubs();)
}
4225 
4226 #if INCLUDE_JFR
// Generates the JFR support stubs, caching both the RuntimeStub* (owner of
// the code blob) and its raw entry point for each stub.
void StubGenerator::generate_jfr_stubs() {
  StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
  StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
  StubRoutines::_jfr_return_lease_stub = generate_jfr_return_lease();
  StubRoutines::_jfr_return_lease = StubRoutines::_jfr_return_lease_stub->entry_point();
}
4233 #endif
4234 
4235 void StubGenerator::generate_final_stubs() {
4236   // Generates the rest of stubs and initializes the entry points
4237 
4238   // These entry points require SharedInfo::stack0 to be set up in
4239   // non-core builds and need to be relocatable, so they each
4240   // fabricate a RuntimeStub internally.
4241   StubRoutines::_throw_AbstractMethodError_entry =
< prev index next >