7046 // rscratch2 contains the size of the frames to thaw, 0 if overflow or no more frames
7047 __ cbnz(rscratch2, thaw_success);
7048 __ lea(rscratch1, ExternalAddress(StubRoutines::throw_StackOverflowError_entry()));
7049 __ br(rscratch1);
7050 __ bind(thaw_success);
7051
7052 // make room for the thawed frames
7053 __ sub(rscratch1, sp, rscratch2);
7054 __ andr(rscratch1, rscratch1, -16); // align
7055 __ mov(sp, rscratch1);
7056
7057 if (return_barrier) {
7058 // save original return value -- again
7059 __ fmovd(rscratch1, v0);
7060 __ stp(rscratch1, r0, Address(__ pre(sp, -2 * wordSize)));
7061 }
7062
7063 // If we want, we can templatize thaw by kind, and have three different entries
7064 __ movw(c_rarg1, (uint32_t)kind);
7065
7066 __ call_VM_leaf(Continuation::thaw_entry(), rthread, c_rarg1);
7067 __ mov(rscratch2, r0); // r0 is the sp of the yielding frame
7068
7069 if (return_barrier) {
7070 // restore return value (no safepoint in the call to thaw, so even an oop return value should be OK)
7071 __ ldp(rscratch1, r0, Address(__ post(sp, 2 * wordSize)));
7072 __ fmovd(v0, rscratch1);
7073 } else {
7074 __ mov(r0, zr); // return 0 (success) from doYield
7075 }
7076
7077 // we're now on the yield frame (which is in an address above us b/c rsp has been pushed down)
7078 __ sub(sp, rscratch2, 2*wordSize); // now pointing to rfp spill
7079 __ mov(rfp, sp);
7080
7081 if (return_barrier_exception) {
7082 __ ldr(c_rarg1, Address(rfp, wordSize)); // return address
7083 __ authenticate_return_address(c_rarg1);
7084 __ verify_oop(r0);
7085 // save return value containing the exception oop in callee-saved R19
7086 __ mov(r19, r0);
7123 // TODO: will probably need multiple return barriers depending on return type
7124 StubCodeMark mark(this, "StubRoutines", "cont return barrier");
7125 address start = __ pc();
7126
7127 generate_cont_thaw(Continuation::thaw_return_barrier);
7128
7129 return start;
7130 }
7131
7132 address generate_cont_returnBarrier_exception() {
7133 if (!Continuations::enabled()) return nullptr;
7134
7135 StubCodeMark mark(this, "StubRoutines", "cont return barrier exception handler");
7136 address start = __ pc();
7137
7138 generate_cont_thaw(Continuation::thaw_return_barrier_exception);
7139
7140 return start;
7141 }
7142
7143 // In sun.security.util.math.intpoly.IntegerPolynomial1305, integers
7144 // are represented as long[5], with BITS_PER_LIMB = 26.
7145 // Pack five 26-bit limbs into three 64-bit registers.
7146 void pack_26(Register dest0, Register dest1, Register dest2, Register src) {
7147 __ ldp(dest0, rscratch1, Address(src, 0)); // 26 bits
7148 __ add(dest0, dest0, rscratch1, Assembler::LSL, 26); // 26 bits
7149 __ ldp(rscratch1, rscratch2, Address(src, 2 * sizeof (jlong)));
7150 __ add(dest0, dest0, rscratch1, Assembler::LSL, 52); // 12 bits
7151
7152 __ add(dest1, zr, rscratch1, Assembler::LSR, 12); // 14 bits
7153 __ add(dest1, dest1, rscratch2, Assembler::LSL, 14); // 26 bits
7154 __ ldr(rscratch1, Address(src, 4 * sizeof (jlong)));
7155 __ add(dest1, dest1, rscratch1, Assembler::LSL, 40); // 24 bits
7156
7157 if (dest2->is_valid()) {
7158 __ add(dest2, zr, rscratch1, Assembler::LSR, 24); // 2 bits
7159 } else {
7160 #ifdef ASSERT
7161 Label OK;
7162 __ cmp(zr, rscratch1, Assembler::LSR, 24); // 2 bits
8391 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
8392 StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
8393 }
8394
8395 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
8396 StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
8397 }
8398
8399 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
8400 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
8401 StubRoutines::_hf2f = generate_float16ToFloat();
8402 StubRoutines::_f2hf = generate_floatToFloat16();
8403 }
8404 }
8405
  // Generates the continuation support stubs (thaw plus both return
  // barriers) and publishes their entry points through StubRoutines.
  // NOTE(review): each generator returns nullptr when continuations are
  // disabled, so the published entries may legitimately be null.
8406   void generate_continuation_stubs() {
8407     // Continuation stubs:
8408     StubRoutines::_cont_thaw = generate_cont_thaw();
8409     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
8410     StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
8411 
8412     JFR_ONLY(generate_jfr_stubs();)  // JFR stubs only when JFR is built in
8413   }
8414
8415 #if INCLUDE_JFR
  // Generates the JFR (Flight Recorder) support stubs and caches both
  // the stub objects and their raw entry points in StubRoutines.
8416   void generate_jfr_stubs() {
8417     StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
8418     StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
8419     StubRoutines::_jfr_return_lease_stub = generate_jfr_return_lease();
8420     StubRoutines::_jfr_return_lease = StubRoutines::_jfr_return_lease_stub->entry_point();
8421   }
8422 #endif // INCLUDE_JFR
8423
8424 void generate_final_stubs() {
8425 // support for verify_oop (must happen after universe_init)
8426 if (VerifyOops) {
8427 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
8428 }
8429 StubRoutines::_throw_AbstractMethodError_entry =
8430 generate_throw_exception("AbstractMethodError throw_exception",
|
7046 // rscratch2 contains the size of the frames to thaw, 0 if overflow or no more frames
7047 __ cbnz(rscratch2, thaw_success);
7048 __ lea(rscratch1, ExternalAddress(StubRoutines::throw_StackOverflowError_entry()));
7049 __ br(rscratch1);
7050 __ bind(thaw_success);
7051
7052 // make room for the thawed frames
7053 __ sub(rscratch1, sp, rscratch2);
7054 __ andr(rscratch1, rscratch1, -16); // align
7055 __ mov(sp, rscratch1);
7056
7057 if (return_barrier) {
7058 // save original return value -- again
7059 __ fmovd(rscratch1, v0);
7060 __ stp(rscratch1, r0, Address(__ pre(sp, -2 * wordSize)));
7061 }
7062
7063 // If we want, we can templatize thaw by kind, and have three different entries
7064 __ movw(c_rarg1, (uint32_t)kind);
7065
7066 __ set_last_Java_frame(sp, rfp, rscratch1, rscratch2);
7067 __ call_VM_leaf(Continuation::thaw_entry(), rthread, c_rarg1);
7068 __ reset_last_Java_frame(true);
7069 __ mov(rscratch2, r0); // r0 is the sp of the yielding frame
7070
7071 if (return_barrier) {
7072 // restore return value (no safepoint in the call to thaw, so even an oop return value should be OK)
7073 __ ldp(rscratch1, r0, Address(__ post(sp, 2 * wordSize)));
7074 __ fmovd(v0, rscratch1);
7075 } else {
7076 __ mov(r0, zr); // return 0 (success) from doYield
7077 }
7078
7079 // we're now on the yield frame (which is in an address above us b/c rsp has been pushed down)
7080 __ sub(sp, rscratch2, 2*wordSize); // now pointing to rfp spill
7081 __ mov(rfp, sp);
7082
7083 if (return_barrier_exception) {
7084 __ ldr(c_rarg1, Address(rfp, wordSize)); // return address
7085 __ authenticate_return_address(c_rarg1);
7086 __ verify_oop(r0);
7087 // save return value containing the exception oop in callee-saved R19
7088 __ mov(r19, r0);
7125 // TODO: will probably need multiple return barriers depending on return type
7126 StubCodeMark mark(this, "StubRoutines", "cont return barrier");
7127 address start = __ pc();
7128
7129 generate_cont_thaw(Continuation::thaw_return_barrier);
7130
7131 return start;
7132 }
7133
7134 address generate_cont_returnBarrier_exception() {
7135 if (!Continuations::enabled()) return nullptr;
7136
7137 StubCodeMark mark(this, "StubRoutines", "cont return barrier exception handler");
7138 address start = __ pc();
7139
7140 generate_cont_thaw(Continuation::thaw_return_barrier_exception);
7141
7142 return start;
7143 }
7144
  // Stub run after a continuation was preempted (frozen from outside).
  // It clears the preempting flag, pops back to the enterSpecial frame,
  // and either returns to Continuation.run() or — if the preemption was
  // cancelled in the meantime — re-enters the thaw path.
7145   address generate_cont_preempt_stub() {
7146     if (!Continuations::enabled()) return nullptr;
7147     StubCodeMark mark(this, "StubRoutines","Continuation preempt stub");
7148     address start = __ pc();
7149 
7150     __ reset_last_Java_frame(true);
7151 
7152     // reset the flag
7153     __ strb(zr, Address(rthread, JavaThread::preempting_offset()));
7154 
7155     // Set sp to enterSpecial frame and then remove it from the stack
7156     __ ldr(rscratch2, Address(rthread, JavaThread::cont_entry_offset()));
7157     __ mov(sp, rscratch2);
7158 
7159     Label preemption_cancelled;
7160     __ ldrb(rscratch1, Address(rthread, JavaThread::preemption_cancelled_offset()));
7161     __ cbnz(rscratch1, preemption_cancelled);
7162 
7163     // Remove enterSpecial frame from the stack and return to Continuation.run()
7164     SharedRuntime::continuation_enter_cleanup(_masm);
7165     __ leave();
7166     __ ret(lr);
7167 
7168     __ bind(preemption_cancelled);
    // Preemption was cancelled: clear the cancellation flag, restore rfp
    // to just past the ContinuationEntry, and jump to the recorded thaw
    // call pc so execution resumes as if the thaw call had just happened.
7169     __ strb(zr, Address(rthread, JavaThread::preemption_cancelled_offset()));
7170     __ lea(rfp, Address(sp, checked_cast<int32_t>(ContinuationEntry::size())));
7171     __ lea(rscratch1, ExternalAddress((address)&ContinuationEntry::_thaw_call_pc));
7172     __ ldr(rscratch1, Address(rscratch1));
7173     __ br(rscratch1);
7174 
7175     return start;
7176   }
7177
  // Small adapter used when resuming a preempted continuation into
  // compiled code: it only zeroes r20 (see comment below) and then pops
  // the current frame and returns.
7178   address generate_cont_resume_compiler_adapter() {
7179     if (!Continuations::enabled()) return nullptr;
7180     StubCodeMark mark(this, "StubRoutines", "Continuation resume compiler adapter");
7181     address start = __ pc();
7182 
7183     // The safepoint blob handler expects that r20, being a callee saved register, will be preserved
7184     // during the VM call. It is used to check if the return pc back to Java was modified in the runtime.
7185     // If it wasn't, the return pc is modified so on return the poll instruction is skipped. Saving this
7186     // additional value of r20 during freeze will complicate too much the code, so we just zero it here
7187     // so that the comparison fails and the skip is not attempted in case the pc was indeed changed.
7188     __ movptr(r20, NULL_WORD);
7189 
7190     __ leave();
7191     __ ret(lr);
7192 
7193     return start;
7194   }
7195
  // Stub that finishes a pending monitor operation when a continuation is
  // resumed: it pops the ObjectMonitor pointer off the stack, calls into
  // the VM (SharedRuntime::resume_monitor_operation), and then either
  // continues thawing (lock acquired) or unwinds back to
  // Continuation.run() if the thread is preempting again.
7196   address generate_cont_resume_monitor_operation() {
7197     if (!Continuations::enabled()) return nullptr;
7198     StubCodeMark mark(this, "StubRoutines","Continuation resume monitor operation");
7199     address start = __ pc();
7200 
    // The monitor to operate on was pushed on the stack; pop it into c_rarg1.
7201     const Register waiter_reg = c_rarg1;
7202     __ ldr(waiter_reg, __ post(sp, 2 * wordSize));
7203 
7204 #ifdef ASSERT
7205     { Label L;
7206       __ cbnz(waiter_reg, L);
7207       __ stop("ObjectMonitor to use is null");
7208       __ bind(L);
7209     }
7210 #endif // ASSERT
7211 
    // Call into the VM with a walkable last Java frame; c_rarg0 = thread.
7212     __ set_last_Java_frame(sp, rfp, lr, rscratch1);
7213     __ mov(c_rarg0, rthread);
7214     __ rt_call(CAST_FROM_FN_PTR(address, SharedRuntime::resume_monitor_operation));
7215     __ reset_last_Java_frame(true);
7216 
7217     Label failAcquire;
7218     __ ldrb(rscratch1, Address(rthread, JavaThread::preempting_offset()));
7219     __ cbnz(rscratch1, failAcquire);
7220     // We have the lock now, just return to caller (we will actually hit the
7221     // return barrier to thaw more frames)
7222 
7223     // ThawBase::push_resume_monitor_operation set things up so that
7224     // SP now points to {fp, lr}.
7225     __ ldp(rfp, lr, Address(__ post(sp, 2 * wordSize)));
7226     __ ret(lr);
7227 
7228     __ bind(failAcquire);
7229     __ strb(/*false*/zr, Address(rthread, JavaThread::preempting_offset()));
7230     // Set sp to enterSpecial frame
7231     __ ldr(rscratch1, Address(rthread, JavaThread::cont_entry_offset()));
7232     __ mov(sp, rscratch1);
7233     // Remove enterSpecial frame from the stack and return to Continuation.run()
7234     SharedRuntime::continuation_enter_cleanup(_masm);
7235     __ leave();
7236     __ ret(lr);
7237 
7238     return start;
7239   }
7240
7241 // In sun.security.util.math.intpoly.IntegerPolynomial1305, integers
7242 // are represented as long[5], with BITS_PER_LIMB = 26.
7243 // Pack five 26-bit limbs into three 64-bit registers.
7244 void pack_26(Register dest0, Register dest1, Register dest2, Register src) {
7245 __ ldp(dest0, rscratch1, Address(src, 0)); // 26 bits
7246 __ add(dest0, dest0, rscratch1, Assembler::LSL, 26); // 26 bits
7247 __ ldp(rscratch1, rscratch2, Address(src, 2 * sizeof (jlong)));
7248 __ add(dest0, dest0, rscratch1, Assembler::LSL, 52); // 12 bits
7249
7250 __ add(dest1, zr, rscratch1, Assembler::LSR, 12); // 14 bits
7251 __ add(dest1, dest1, rscratch2, Assembler::LSL, 14); // 26 bits
7252 __ ldr(rscratch1, Address(src, 4 * sizeof (jlong)));
7253 __ add(dest1, dest1, rscratch1, Assembler::LSL, 40); // 24 bits
7254
7255 if (dest2->is_valid()) {
7256 __ add(dest2, zr, rscratch1, Assembler::LSR, 24); // 2 bits
7257 } else {
7258 #ifdef ASSERT
7259 Label OK;
7260 __ cmp(zr, rscratch1, Assembler::LSR, 24); // 2 bits
8489 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
8490 StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
8491 }
8492
8493 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
8494 StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
8495 }
8496
8497 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
8498 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
8499 StubRoutines::_hf2f = generate_float16ToFloat();
8500 StubRoutines::_f2hf = generate_floatToFloat16();
8501 }
8502 }
8503
  // Generates every continuation support stub (thaw, both return
  // barriers, the preemption/resume stubs) and publishes their entry
  // points through StubRoutines. NOTE(review): each generator returns
  // nullptr when continuations are disabled, so entries may be null.
8504   void generate_continuation_stubs() {
8505     // Continuation stubs:
8506     StubRoutines::_cont_thaw = generate_cont_thaw();
8507     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
8508     StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
8509     StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();
8510     StubRoutines::_cont_resume_monitor_operation = generate_cont_resume_monitor_operation();
8511     StubRoutines::_cont_resume_compiler_adapter = generate_cont_resume_compiler_adapter();
8512 
8513     JFR_ONLY(generate_jfr_stubs();)  // JFR stubs only when JFR is built in
8514   }
8515
8516 #if INCLUDE_JFR
  // Generates the JFR (Flight Recorder) support stubs and caches both
  // the stub objects and their raw entry points in StubRoutines.
8517   void generate_jfr_stubs() {
8518     StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
8519     StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
8520     StubRoutines::_jfr_return_lease_stub = generate_jfr_return_lease();
8521     StubRoutines::_jfr_return_lease = StubRoutines::_jfr_return_lease_stub->entry_point();
8522   }
8523 #endif // INCLUDE_JFR
8524
8525 void generate_final_stubs() {
8526 // support for verify_oop (must happen after universe_init)
8527 if (VerifyOops) {
8528 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
8529 }
8530 StubRoutines::_throw_AbstractMethodError_entry =
8531 generate_throw_exception("AbstractMethodError throw_exception",
|