< prev index next >

src/hotspot/cpu/x86/stubGenerator_x86_64.cpp

Print this page

3292     }
3293     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
3294       StubRoutines::_dtan = generate_libmTan();
3295     }
3296     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dexp)) {
3297       StubRoutines::_dexp = generate_libmExp();
3298     }
3299     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dpow)) {
3300       StubRoutines::_dpow = generate_libmPow();
3301     }
3302     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
3303       StubRoutines::_dlog = generate_libmLog();
3304     }
3305     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog10)) {
3306       StubRoutines::_dlog10 = generate_libmLog10();
3307     }
3308   }
3309 }
3310 
3311 








































3312 address StubGenerator::generate_cont_thaw(const char* label, Continuation::thaw_kind kind) {
3313   if (!Continuations::enabled()) return nullptr;
3314 
3315   bool return_barrier = Continuation::is_thaw_return_barrier(kind);
3316   bool return_barrier_exception = Continuation::is_thaw_return_barrier_exception(kind);
3317 
3318   StubCodeMark mark(this, "StubRoutines", label);
3319   address start = __ pc();
3320 
3321   // TODO: Handle Valhalla return types. May require generating different return barriers.
3322 
3323   if (!return_barrier) {
3324     // Pop return address. If we don't do this, we get a drift,
3325     // where the bottom-most frozen frame continuously grows.
3326     __ pop(c_rarg3);
3327   } else {
3328     __ movptr(rsp, Address(r15_thread, JavaThread::cont_entry_offset()));
3329   }
3330 
3331 #ifdef ASSERT

3661                                               throw_delayed_StackOverflowError));
3662   if (UseCRC32Intrinsics) {
3663     // set table address before stub generation which use it
3664     StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
3665     StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
3666   }
3667 
3668   if (UseCRC32CIntrinsics) {
3669     bool supports_clmul = VM_Version::supports_clmul();
3670     StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3671     StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3672     StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3673   }
3674 
3675   if (UseAdler32Intrinsics) {
3676      StubRoutines::_updateBytesAdler32 = generate_updateBytesAdler32();
3677   }
3678 }
3679 
// Phase-1 stub generation: stubs that must exist before the bulk of stub
// generation runs (see generate_all below). Only continuation (Loom) and
// JFR stubs are produced here; each call emits code into the stub buffer
// and publishes its entry point through a StubRoutines:: global.
void StubGenerator::generate_phase1() {
  // Continuation stubs:
  StubRoutines::_cont_thaw          = generate_cont_thaw();
  StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
  StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();

  // Order matters: the second JFR_ONLY line dereferences the stub object
  // assigned by the first, so it must run after it.
  JFR_ONLY(StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();)
  JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
}
3689 
3690 void StubGenerator::generate_all() {
3691   // Generates all stubs and initializes the entry points
3692 
3693   // These entry points require SharedInfo::stack0 to be set up in
3694   // non-core builds and need to be relocatable, so they each
3695   // fabricate a RuntimeStub internally.
3696   StubRoutines::_throw_AbstractMethodError_entry =
3697     generate_throw_exception("AbstractMethodError throw_exception",
3698                              CAST_FROM_FN_PTR(address,
3699                                               SharedRuntime::
3700                                               throw_AbstractMethodError));

3292     }
3293     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
3294       StubRoutines::_dtan = generate_libmTan();
3295     }
3296     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dexp)) {
3297       StubRoutines::_dexp = generate_libmExp();
3298     }
3299     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dpow)) {
3300       StubRoutines::_dpow = generate_libmPow();
3301     }
3302     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
3303       StubRoutines::_dlog = generate_libmLog();
3304     }
3305     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog10)) {
3306       StubRoutines::_dlog10 = generate_libmLog10();
3307     }
3308   }
3309 }
3310 
3311 
3312   // Call stub to call runtime oopDesc::load_nklass_runtime().
3313   // rax: call argument (object)
3314   // rax: return object's narrowKlass
3315   // Preserves all caller-saved registers, except rax
3316 #ifdef _LP64
3317 address StubGenerator::generate_load_nklass() {
3318   __ align(CodeEntryAlignment);
3319   StubCodeMark(this, "StubRoutines", "load_nklass");
3320   address start = __ pc();
3321   __ enter(); // save rbp
3322 
3323   __ andptr(rsp, -(StackAlignmentInBytes));    // Align stack
3324   __ push_FPU_state();
3325 
3326   __ push(rdi);
3327   __ push(rsi);
3328   __ push(rdx);
3329   __ push(rcx);
3330   __ push(r8);
3331   __ push(r9);
3332   __ push(r10);
3333   __ push(r11);
3334   __ call_VM_leaf(CAST_FROM_FN_PTR(address, oopDesc::load_nklass_runtime), rax);
3335   __ pop(r11);
3336   __ pop(r10);
3337   __ pop(r9);
3338   __ pop(r8);
3339   __ pop(rcx);
3340   __ pop(rdx);
3341   __ pop(rsi);
3342   __ pop(rdi);
3343 
3344   __ pop_FPU_state();
3345 
3346   __ leave();
3347   __ ret(0);
3348   return start;
3349 }
3350 #endif // _LP64
3351 
3352 address StubGenerator::generate_cont_thaw(const char* label, Continuation::thaw_kind kind) {
3353   if (!Continuations::enabled()) return nullptr;
3354 
3355   bool return_barrier = Continuation::is_thaw_return_barrier(kind);
3356   bool return_barrier_exception = Continuation::is_thaw_return_barrier_exception(kind);
3357 
3358   StubCodeMark mark(this, "StubRoutines", label);
3359   address start = __ pc();
3360 
3361   // TODO: Handle Valhalla return types. May require generating different return barriers.
3362 
3363   if (!return_barrier) {
3364     // Pop return address. If we don't do this, we get a drift,
3365     // where the bottom-most frozen frame continuously grows.
3366     __ pop(c_rarg3);
3367   } else {
3368     __ movptr(rsp, Address(r15_thread, JavaThread::cont_entry_offset()));
3369   }
3370 
3371 #ifdef ASSERT

3701                                               throw_delayed_StackOverflowError));
3702   if (UseCRC32Intrinsics) {
3703     // set table address before stub generation which use it
3704     StubRoutines::_crc_table_adr = (address)StubRoutines::x86::_crc_table;
3705     StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
3706   }
3707 
3708   if (UseCRC32CIntrinsics) {
3709     bool supports_clmul = VM_Version::supports_clmul();
3710     StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3711     StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3712     StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3713   }
3714 
3715   if (UseAdler32Intrinsics) {
3716      StubRoutines::_updateBytesAdler32 = generate_updateBytesAdler32();
3717   }
3718 }
3719 
// Phase-1 stub generation: stubs that must exist before the bulk of stub
// generation runs (see generate_all below). Produces the load_nklass helper
// (64-bit only), the continuation (Loom) stubs, and the JFR checkpoint stub;
// each call emits code into the stub buffer and publishes its entry point
// through a StubRoutines:: global.
void StubGenerator::generate_phase1() {
#ifdef _LP64
  // Helper stub used to fetch an object's narrowKlass via the runtime.
  StubRoutines::_load_nklass = generate_load_nklass();
#endif

  // Continuation stubs:
  StubRoutines::_cont_thaw          = generate_cont_thaw();
  StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
  StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();

  // Order matters: the second JFR_ONLY line dereferences the stub object
  // assigned by the first, so it must run after it.
  JFR_ONLY(StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();)
  JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
}
3733 
3734 void StubGenerator::generate_all() {
3735   // Generates all stubs and initializes the entry points
3736 
3737   // These entry points require SharedInfo::stack0 to be set up in
3738   // non-core builds and need to be relocatable, so they each
3739   // fabricate a RuntimeStub internally.
3740   StubRoutines::_throw_AbstractMethodError_entry =
3741     generate_throw_exception("AbstractMethodError throw_exception",
3742                              CAST_FROM_FN_PTR(address,
3743                                               SharedRuntime::
3744                                               throw_AbstractMethodError));
< prev index next >