
src/hotspot/cpu/x86/stubGenerator_x86_64.cpp


3166   __ ret(0);
3167 
3168 
3169   __ BIND(deoptimize_label);
3170 
3171   __ popa();
3172   __ pop(c_rarg0);
3173 
3174   __ leave();
3175 
3176   // This can be taken out, but is good for verification purposes. Getting a SIGSEGV
3177   // here while still having a correct stack is valuable.
3178   __ testptr(rsp, Address(rsp, 0));
3179 
3180   __ movptr(rsp, Address(rsp, 0)); // new rsp was written in the barrier
3181   __ jmp(Address(rsp, -1 * wordSize)); // jmp target should be caller's verified_entry_point
3182 
3183   return start;
3184 }
3185 
3186  /**
3187  *  Arguments:
3188  *
3189  *  Input:
3190  *    c_rarg0   - out address
3191  *    c_rarg1   - in address
3192  *    c_rarg2   - offset
3193  *    c_rarg3   - len
3194  * not Win64
3195  *    c_rarg4   - k
3196  * Win64
3197  *    rsp+40    - k
3198  */
3199 address StubGenerator::generate_mulAdd() {
3200   __ align(CodeEntryAlignment);
3201   StubCodeMark mark(this, "StubRoutines", "mulAdd");
3202   address start = __ pc();
3203 
3204   // Win64: rcx, rdx, r8, r9 (c_rarg0, c_rarg1, ...)
3205   // Unix:  rdi, rsi, rdx, rcx, r8, r9 (c_rarg0, c_rarg1, ...)

4003     if (VM_Version::supports_avx512_vbmi()) {
4004       StubRoutines::x86::_shuffle_base64 = base64_shuffle_addr();
4005       StubRoutines::x86::_lookup_lo_base64 = base64_vbmi_lookup_lo_addr();
4006       StubRoutines::x86::_lookup_hi_base64 = base64_vbmi_lookup_hi_addr();
4007       StubRoutines::x86::_lookup_lo_base64url = base64_vbmi_lookup_lo_url_addr();
4008       StubRoutines::x86::_lookup_hi_base64url = base64_vbmi_lookup_hi_url_addr();
4009       StubRoutines::x86::_pack_vec_base64 = base64_vbmi_pack_vec_addr();
4010       StubRoutines::x86::_join_0_1_base64 = base64_vbmi_join_0_1_addr();
4011       StubRoutines::x86::_join_1_2_base64 = base64_vbmi_join_1_2_addr();
4012       StubRoutines::x86::_join_2_3_base64 = base64_vbmi_join_2_3_addr();
4013     }
4014     StubRoutines::x86::_decoding_table_base64 = base64_decoding_table_addr();
4015     StubRoutines::_base64_encodeBlock = generate_base64_encodeBlock();
4016     StubRoutines::_base64_decodeBlock = generate_base64_decodeBlock();
4017   }
4018 
4019   BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
4020   if (bs_nm != NULL) {
4021     StubRoutines::x86::_method_entry_barrier = generate_method_entry_barrier();
4022   }
4023 #ifdef COMPILER2
4024   if (UseMultiplyToLenIntrinsic) {
4025     StubRoutines::_multiplyToLen = generate_multiplyToLen();
4026   }
4027   if (UseSquareToLenIntrinsic) {
4028     StubRoutines::_squareToLen = generate_squareToLen();
4029   }
4030   if (UseMulAddIntrinsic) {
4031     StubRoutines::_mulAdd = generate_mulAdd();
4032   }
4033   if (VM_Version::supports_avx512_vbmi2()) {
4034     StubRoutines::_bigIntegerRightShiftWorker = generate_bigIntegerRightShift();
4035     StubRoutines::_bigIntegerLeftShiftWorker = generate_bigIntegerLeftShift();
4036   }
4037   if (UseMontgomeryMultiplyIntrinsic) {
4038     StubRoutines::_montgomeryMultiply
4039       = CAST_FROM_FN_PTR(address, SharedRuntime::montgomery_multiply);
4040   }
4041   if (UseMontgomerySquareIntrinsic) {
4042     StubRoutines::_montgomerySquare

3166   __ ret(0);
3167 
3168 
3169   __ BIND(deoptimize_label);
3170 
3171   __ popa();
3172   __ pop(c_rarg0);
3173 
3174   __ leave();
3175 
3176   // This can be taken out, but is good for verification purposes. Getting a SIGSEGV
3177   // here while still having a correct stack is valuable.
3178   __ testptr(rsp, Address(rsp, 0));
3179 
3180   __ movptr(rsp, Address(rsp, 0)); // new rsp was written in the barrier
3181   __ jmp(Address(rsp, -1 * wordSize)); // jmp target should be caller's verified_entry_point
3182 
3183   return start;
3184 }
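For orientation, the deoptimization tail above amounts to the following hand-off. This is a sketch of the intent only, with hypothetical names; the real work is the two generated instructions, and it relies on the barrier runtime having stored the replacement rsp in the slot that held the return address, as the in-line comments state.

    #include <cstdint>

    // Illustrative model, not generated code. 'rsp' is the stack pointer right
    // after __ leave(): it points at the slot that used to hold the return
    // address, which the barrier runtime overwrote with the replacement rsp.
    static uintptr_t deopt_hand_off_target(uintptr_t*& rsp) {
      rsp = reinterpret_cast<uintptr_t*>(*rsp);  // movptr(rsp, Address(rsp, 0))
      return *(rsp - 1);                         // jmp(Address(rsp, -1 * wordSize)): caller's verified_entry_point
    }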
3185 
3186 // Call runtime to ensure lock-stack size.
3187 // Arguments:
3188 // - c_rarg0: the required _limit pointer
3189 address StubGenerator::generate_check_lock_stack() {
3190   __ align(CodeEntryAlignment);
3191   StubCodeMark mark(this, "StubRoutines", "check_lock_stack");
3192   address start = __ pc();
3193 
3194   BLOCK_COMMENT("Entry:");
3195   __ enter(); // save rbp
3196 
3197   __ pusha();
3198 
3199   // The method may have floats as arguments, and we must spill them before calling
3200   // the VM runtime.
3201   assert(Argument::n_float_register_parameters_j == 8, "Assumption");
3202   const int xmm_size = wordSize * 2;
3203   const int xmm_spill_size = xmm_size * Argument::n_float_register_parameters_j;
3204   __ subptr(rsp, xmm_spill_size);
3205   __ movdqu(Address(rsp, xmm_size * 7), xmm7);
3206   __ movdqu(Address(rsp, xmm_size * 6), xmm6);
3207   __ movdqu(Address(rsp, xmm_size * 5), xmm5);
3208   __ movdqu(Address(rsp, xmm_size * 4), xmm4);
3209   __ movdqu(Address(rsp, xmm_size * 3), xmm3);
3210   __ movdqu(Address(rsp, xmm_size * 2), xmm2);
3211   __ movdqu(Address(rsp, xmm_size * 1), xmm1);
3212   __ movdqu(Address(rsp, xmm_size * 0), xmm0);
3213 
3214   __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<void (*)(oop*)>(LockStack::ensure_lock_stack_size)), rax);
3215 
3216   __ movdqu(xmm0, Address(rsp, xmm_size * 0));
3217   __ movdqu(xmm1, Address(rsp, xmm_size * 1));
3218   __ movdqu(xmm2, Address(rsp, xmm_size * 2));
3219   __ movdqu(xmm3, Address(rsp, xmm_size * 3));
3220   __ movdqu(xmm4, Address(rsp, xmm_size * 4));
3221   __ movdqu(xmm5, Address(rsp, xmm_size * 5));
3222   __ movdqu(xmm6, Address(rsp, xmm_size * 6));
3223   __ movdqu(xmm7, Address(rsp, xmm_size * 7));
3224   __ addptr(rsp, xmm_spill_size);
3225 
3226   __ popa();
3227 
3228   __ leave();
3229 
3230   __ ret(0);
3231 
3232   return start;
3233 }
3234 
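A possible call site for the stub above, sketched for illustration only. The accessor and register names below are assumptions, not part of this patch; the grounded point is the stub's contract: c_rarg0 carries the required _limit pointer, and all GPRs plus the eight Java float-argument XMM registers are preserved across the call, so a fast-lock path can call it without saving its own argument state.

    // Hypothetical caller (names assumed): a fast-lock path that finds the lock
    // stack full passes the limit it needs in c_rarg0 and calls the stub.
    //   __ lea(c_rarg0, Address(lock_stack_top, oopSize));                   // required _limit pointer (assumed addressing)
    //   __ call(RuntimeAddress(StubRoutines::x86::check_lock_stack_addr())); // assumed accessor for _check_lock_stack
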
3235  /**
3236  *  Arguments:
3237  *
3238  *  Input:
3239  *    c_rarg0   - out address
3240  *    c_rarg1   - in address
3241  *    c_rarg2   - offset
3242  *    c_rarg3   - len
3243  * not Win64
3244  *    c_rarg4   - k
3245  * Win64
3246  *    rsp+40    - k
3247  */
3248 address StubGenerator::generate_mulAdd() {
3249   __ align(CodeEntryAlignment);
3250   StubCodeMark mark(this, "StubRoutines", "mulAdd");
3251   address start = __ pc();
3252 
3253   // Win64: rcx, rdx, r8, r9 (c_rarg0, c_rarg1, ...)
3254   // Unix:  rdi, rsi, rdx, rcx, r8, r9 (c_rarg0, c_rarg1, ...)
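For reference, this stub implements BigInteger's mulAdd primitive: multiply the len 32-bit limbs of in by k and add the product into out at the given offset, returning the final carry. A minimal scalar sketch, assuming the same big-endian limb order and offset convention as java.math.BigInteger.implMulAdd (the stub's own argument mapping is not restated here):

    #include <cstddef>
    #include <cstdint>

    // Scalar reference for the mulAdd operation (sketch only). Limbs are 32-bit
    // and stored big-endian, as in BigInteger magnitudes; 'offset' is counted
    // from the high end of 'out', mirroring implMulAdd (an assumption noted in
    // the lead-in, since the stub fragment above does not spell this out).
    static uint32_t mul_add_reference(uint32_t* out, size_t out_len,
                                      const uint32_t* in, size_t offset,
                                      size_t len, uint32_t k) {
      uint64_t carry = 0;
      size_t out_idx = out_len - offset - 1;   // implMulAdd: offset = out.length - offset - 1
      for (size_t j = len; j-- > 0; ) {
        uint64_t product = (uint64_t)in[j] * k + out[out_idx] + carry;
        out[out_idx--] = (uint32_t)product;
        carry = product >> 32;
      }
      return (uint32_t)carry;
    }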

4052     if (VM_Version::supports_avx512_vbmi()) {
4053       StubRoutines::x86::_shuffle_base64 = base64_shuffle_addr();
4054       StubRoutines::x86::_lookup_lo_base64 = base64_vbmi_lookup_lo_addr();
4055       StubRoutines::x86::_lookup_hi_base64 = base64_vbmi_lookup_hi_addr();
4056       StubRoutines::x86::_lookup_lo_base64url = base64_vbmi_lookup_lo_url_addr();
4057       StubRoutines::x86::_lookup_hi_base64url = base64_vbmi_lookup_hi_url_addr();
4058       StubRoutines::x86::_pack_vec_base64 = base64_vbmi_pack_vec_addr();
4059       StubRoutines::x86::_join_0_1_base64 = base64_vbmi_join_0_1_addr();
4060       StubRoutines::x86::_join_1_2_base64 = base64_vbmi_join_1_2_addr();
4061       StubRoutines::x86::_join_2_3_base64 = base64_vbmi_join_2_3_addr();
4062     }
4063     StubRoutines::x86::_decoding_table_base64 = base64_decoding_table_addr();
4064     StubRoutines::_base64_encodeBlock = generate_base64_encodeBlock();
4065     StubRoutines::_base64_decodeBlock = generate_base64_decodeBlock();
4066   }
4067 
4068   BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
4069   if (bs_nm != NULL) {
4070     StubRoutines::x86::_method_entry_barrier = generate_method_entry_barrier();
4071   }
4072   if (UseFastLocking) {
4073     StubRoutines::x86::_check_lock_stack = generate_check_lock_stack();
4074   }
4075 #ifdef COMPILER2
4076   if (UseMultiplyToLenIntrinsic) {
4077     StubRoutines::_multiplyToLen = generate_multiplyToLen();
4078   }
4079   if (UseSquareToLenIntrinsic) {
4080     StubRoutines::_squareToLen = generate_squareToLen();
4081   }
4082   if (UseMulAddIntrinsic) {
4083     StubRoutines::_mulAdd = generate_mulAdd();
4084   }
4085   if (VM_Version::supports_avx512_vbmi2()) {
4086     StubRoutines::_bigIntegerRightShiftWorker = generate_bigIntegerRightShift();
4087     StubRoutines::_bigIntegerLeftShiftWorker = generate_bigIntegerLeftShift();
4088   }
4089   if (UseMontgomeryMultiplyIntrinsic) {
4090     StubRoutines::_montgomeryMultiply
4091       = CAST_FROM_FN_PTR(address, SharedRuntime::montgomery_multiply);
4092   }
4093   if (UseMontgomerySquareIntrinsic) {
4094     StubRoutines::_montgomerySquare