7004 // rscratch2 contains the size of the frames to thaw, 0 if overflow or no more frames
7005 __ cbnz(rscratch2, thaw_success);
7006 __ lea(rscratch1, ExternalAddress(StubRoutines::throw_StackOverflowError_entry()));
7007 __ br(rscratch1);
7008 __ bind(thaw_success);
7009
7010 // make room for the thawed frames
7011 __ sub(rscratch1, sp, rscratch2);
7012 __ andr(rscratch1, rscratch1, -16); // align
7013 __ mov(sp, rscratch1);
7014
7015 if (return_barrier) {
7016 // save original return value -- again
7017 __ fmovd(rscratch1, v0);
7018 __ stp(rscratch1, r0, Address(__ pre(sp, -2 * wordSize)));
7019 }
7020
7021 // If we want, we can templatize thaw by kind, and have three different entries
7022 __ movw(c_rarg1, (uint32_t)kind);
7023
7024 __ call_VM_leaf(Continuation::thaw_entry(), rthread, c_rarg1);
7025 __ mov(rscratch2, r0); // r0 is the sp of the yielding frame
7026
7027 if (return_barrier) {
7028 // restore return value (no safepoint in the call to thaw, so even an oop return value should be OK)
7029 __ ldp(rscratch1, r0, Address(__ post(sp, 2 * wordSize)));
7030 __ fmovd(v0, rscratch1);
7031 } else {
7032 __ mov(r0, zr); // return 0 (success) from doYield
7033 }
7034
7035 // we're now on the yield frame (which is in an address above us b/c rsp has been pushed down)
7036 __ sub(sp, rscratch2, 2*wordSize); // now pointing to rfp spill
7037 __ mov(rfp, sp);
7038
7039 if (return_barrier_exception) {
7040 __ ldr(c_rarg1, Address(rfp, wordSize)); // return address
7041 __ authenticate_return_address(c_rarg1);
7042 __ verify_oop(r0);
7043 // save return value containing the exception oop in callee-saved R19
7044 __ mov(r19, r0);
7081 // TODO: will probably need multiple return barriers depending on return type
7082 StubCodeMark mark(this, "StubRoutines", "cont return barrier");
7083 address start = __ pc();
7084
7085 generate_cont_thaw(Continuation::thaw_return_barrier);
7086
7087 return start;
7088 }
7089
7090 address generate_cont_returnBarrier_exception() {
7091 if (!Continuations::enabled()) return nullptr;
7092
7093 StubCodeMark mark(this, "StubRoutines", "cont return barrier exception handler");
7094 address start = __ pc();
7095
7096 generate_cont_thaw(Continuation::thaw_return_barrier_exception);
7097
7098 return start;
7099 }
7100
7101 // In sun.security.util.math.intpoly.IntegerPolynomial1305, integers
7102 // are represented as long[5], with BITS_PER_LIMB = 26.
7103 // Pack five 26-bit limbs into three 64-bit registers.
7104 void pack_26(Register dest0, Register dest1, Register dest2, Register src) {
7105 __ ldp(dest0, rscratch1, Address(src, 0)); // 26 bits
7106 __ add(dest0, dest0, rscratch1, Assembler::LSL, 26); // 26 bits
7107 __ ldp(rscratch1, rscratch2, Address(src, 2 * sizeof (jlong)));
7108 __ add(dest0, dest0, rscratch1, Assembler::LSL, 52); // 12 bits
7109
7110 __ add(dest1, zr, rscratch1, Assembler::LSR, 12); // 14 bits
7111 __ add(dest1, dest1, rscratch2, Assembler::LSL, 14); // 26 bits
7112 __ ldr(rscratch1, Address(src, 4 * sizeof (jlong)));
7113 __ add(dest1, dest1, rscratch1, Assembler::LSL, 40); // 24 bits
7114
7115 if (dest2->is_valid()) {
7116 __ add(dest2, zr, rscratch1, Assembler::LSR, 24); // 2 bits
7117 } else {
7118 #ifdef ASSERT
7119 Label OK;
7120 __ cmp(zr, rscratch1, Assembler::LSR, 24); // 2 bits
8349 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
8350 StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
8351 }
8352
8353 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
8354 StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
8355 }
8356
8357 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
8358 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
8359 StubRoutines::_hf2f = generate_float16ToFloat();
8360 StubRoutines::_f2hf = generate_floatToFloat16();
8361 }
8362 }
8363
  // Generate and register the stubs required by (Loom) continuations:
  // the frame-thaw stub plus the normal and exceptional return barriers.
  void generate_continuation_stubs() {
    // Continuation stubs:
    StubRoutines::_cont_thaw = generate_cont_thaw();
    StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
    StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();

    // JFR has continuation-related stubs of its own when it is built in.
    JFR_ONLY(generate_jfr_stubs();)
  }
8372
#if INCLUDE_JFR
  // Generate the JFR (Java Flight Recorder) stubs and cache each stub's
  // entry point alongside the stub object for direct use by generated code.
  void generate_jfr_stubs() {
    StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
    StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
    StubRoutines::_jfr_return_lease_stub = generate_jfr_return_lease();
    StubRoutines::_jfr_return_lease = StubRoutines::_jfr_return_lease_stub->entry_point();
  }
#endif // INCLUDE_JFR
8381
8382 void generate_final_stubs() {
8383 // support for verify_oop (must happen after universe_init)
8384 if (VerifyOops) {
8385 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
8386 }
8387 StubRoutines::_throw_AbstractMethodError_entry =
8388 generate_throw_exception("AbstractMethodError throw_exception",
|
7004 // rscratch2 contains the size of the frames to thaw, 0 if overflow or no more frames
7005 __ cbnz(rscratch2, thaw_success);
7006 __ lea(rscratch1, ExternalAddress(StubRoutines::throw_StackOverflowError_entry()));
7007 __ br(rscratch1);
7008 __ bind(thaw_success);
7009
7010 // make room for the thawed frames
7011 __ sub(rscratch1, sp, rscratch2);
7012 __ andr(rscratch1, rscratch1, -16); // align
7013 __ mov(sp, rscratch1);
7014
7015 if (return_barrier) {
7016 // save original return value -- again
7017 __ fmovd(rscratch1, v0);
7018 __ stp(rscratch1, r0, Address(__ pre(sp, -2 * wordSize)));
7019 }
7020
7021 // If we want, we can templatize thaw by kind, and have three different entries
7022 __ movw(c_rarg1, (uint32_t)kind);
7023
7024 __ set_last_Java_frame(sp, rfp, rscratch1, rscratch2);
7025 __ call_VM_leaf(Continuation::thaw_entry(), rthread, c_rarg1);
7026 __ reset_last_Java_frame(true);
7027 __ mov(rscratch2, r0); // r0 is the sp of the yielding frame
7028
7029 if (return_barrier) {
7030 // restore return value (no safepoint in the call to thaw, so even an oop return value should be OK)
7031 __ ldp(rscratch1, r0, Address(__ post(sp, 2 * wordSize)));
7032 __ fmovd(v0, rscratch1);
7033 } else {
7034 __ mov(r0, zr); // return 0 (success) from doYield
7035 }
7036
7037 // we're now on the yield frame (which is in an address above us b/c rsp has been pushed down)
7038 __ sub(sp, rscratch2, 2*wordSize); // now pointing to rfp spill
7039 __ mov(rfp, sp);
7040
7041 if (return_barrier_exception) {
7042 __ ldr(c_rarg1, Address(rfp, wordSize)); // return address
7043 __ authenticate_return_address(c_rarg1);
7044 __ verify_oop(r0);
7045 // save return value containing the exception oop in callee-saved R19
7046 __ mov(r19, r0);
7083 // TODO: will probably need multiple return barriers depending on return type
7084 StubCodeMark mark(this, "StubRoutines", "cont return barrier");
7085 address start = __ pc();
7086
7087 generate_cont_thaw(Continuation::thaw_return_barrier);
7088
7089 return start;
7090 }
7091
7092 address generate_cont_returnBarrier_exception() {
7093 if (!Continuations::enabled()) return nullptr;
7094
7095 StubCodeMark mark(this, "StubRoutines", "cont return barrier exception handler");
7096 address start = __ pc();
7097
7098 generate_cont_thaw(Continuation::thaw_return_barrier_exception);
7099
7100 return start;
7101 }
7102
  // Stub executed when a preempted continuation's thread resumes. It clears
  // the thread's preempting flag, repositions sp at the ContinuationEntry
  // (enterSpecial) frame, and then either returns to Continuation.run()
  // (normal case) or — if the preemption was cancelled — jumps to the pc
  // saved in ContinuationEntry::_thaw_call_pc to continue thawing frames.
  address generate_cont_preempt_stub() {
    if (!Continuations::enabled()) return nullptr;
    StubCodeMark mark(this, "StubRoutines","Continuation preempt stub");
    address start = __ pc();

    // No Java frame is current at this point.
    __ reset_last_Java_frame(true);

    // reset the flag
    __ strb(zr, Address(rthread, JavaThread::preempting_offset()));

    // Set sp to enterSpecial frame and then remove it from the stack
    __ ldr(rscratch2, Address(rthread, JavaThread::cont_entry_offset()));
    __ mov(sp, rscratch2);

    Label preemption_cancelled;
    // FIXME: Whose responsibility is it to clear this flag?
    __ ldrb(rscratch1, Address(rthread, JavaThread::preemption_cancelled_offset()));
    __ cbnz(rscratch1, preemption_cancelled);

    //__ trace("Remove enterSpecial frame from the stack and return to Continuation.run()");
    // Remove enterSpecial frame from the stack and return to Continuation.run()
    SharedRuntime::continuation_enter_cleanup(_masm);
    __ leave();
    __ ret(lr);

    __ bind(preemption_cancelled);
    //__ trace("preemption_cancelled");
    // Preemption was cancelled: rebuild rfp just above the ContinuationEntry
    // and resume at the recorded thaw call pc.
    __ lea(rfp, Address(sp, checked_cast<int32_t>(ContinuationEntry::size())));
    __ lea(rscratch1, ExternalAddress((address)&ContinuationEntry::_thaw_call_pc));
    __ ldr(rscratch1, Address(rscratch1));
    __ br(rscratch1);

    return start;
  }
7137
  // Small adapter entered before re-running the safepoint blob for a thawed,
  // previously preempted compiled frame; zeroes r20 (see comment below) and
  // pops the current frame before returning.
  address generate_cont_preempt_rerun_compiler_adapter() {
    if (!Continuations::enabled()) return nullptr;
    StubCodeMark mark(this, "StubRoutines", "Continuation preempt safepoint blob adapter");
    address start = __ pc();

    // The safepoint blob handler expects that r20, being a callee saved register, will be preserved
    // during the VM call. It is used to check if the return pc back to Java was modified in the runtime.
    // If it wasn't, the return pc is modified so on return the poll instruction is skipped. Saving this
    // additional value of r20 during freeze will complicate too much the code, so we just zero it here
    // so that the comparison fails and the skip is not attempted in case the pc was indeed changed.
    __ movptr(r20, NULL_WORD);

    // Pop the current frame and return to the caller.
    __ leave();
    __ ret(lr);

    return start;
  }
7155
  // Stub that retries the monitorenter which caused the continuation to be
  // preempted. On successful acquisition it returns to the caller (which will
  // hit the return barrier to thaw more frames); if the thread was preempted
  // again it unwinds to the ContinuationEntry frame and returns to
  // Continuation.run().
  address generate_cont_preempt_monitorenter_redo() {
    if (!Continuations::enabled()) return nullptr;
    StubCodeMark mark(this, "StubRoutines","Continuation monitorenter redo stub");
    address start = __ pc();

    // Pop the ObjectMonitor pointer that was pushed for us on the stack.
    const Register mon_reg = c_rarg1;
    __ ldr(mon_reg, __ post(sp, 2 * wordSize));

#ifdef ASSERT
    { Label L;
      __ cbnz(mon_reg, L);
      __ stop("ObjectMonitor to use is null");
      __ bind(L);
    }
#endif // ASSERT

    // Call into the runtime to retry acquiring the monitor.
    __ set_last_Java_frame(sp, rfp, lr, rscratch1);
    __ mov(c_rarg0, rthread);
    __ rt_call(CAST_FROM_FN_PTR(address, SharedRuntime::redo_monitorenter));
    __ reset_last_Java_frame(true);

    // Check whether the thread got preempted again while reacquiring.
    Label failAcquire;
    __ ldrb(rscratch1, Address(rthread, JavaThread::preempting_offset()));
    __ cbnz(rscratch1, failAcquire);
    // We have the lock now, just return to caller (we will actually hit the
    // return barrier to thaw more frames)

    // ThawBase::push_preempt_monitorenter_redo set things up so that
    // SP now points to {fp, lr}.
    __ ldp(rfp, lr, Address(__ post(sp, 2 * wordSize)));
    __ ret(lr);

    __ bind(failAcquire);
    // Preempted again: clear the flag and unwind to Continuation.run().
    __ strb(/*false*/zr, Address(rthread, JavaThread::preempting_offset()));
    // Set sp to enterSpecial frame
    __ ldr(rscratch1, Address(rthread, JavaThread::cont_entry_offset()));
    __ mov(sp, rscratch1);
    // Remove enterSpecial frame from the stack and return to Continuation.run()
    SharedRuntime::continuation_enter_cleanup(_masm);
    __ leave();
    __ ret(lr);

    return start;
  }
7200
7201 // In sun.security.util.math.intpoly.IntegerPolynomial1305, integers
7202 // are represented as long[5], with BITS_PER_LIMB = 26.
7203 // Pack five 26-bit limbs into three 64-bit registers.
7204 void pack_26(Register dest0, Register dest1, Register dest2, Register src) {
7205 __ ldp(dest0, rscratch1, Address(src, 0)); // 26 bits
7206 __ add(dest0, dest0, rscratch1, Assembler::LSL, 26); // 26 bits
7207 __ ldp(rscratch1, rscratch2, Address(src, 2 * sizeof (jlong)));
7208 __ add(dest0, dest0, rscratch1, Assembler::LSL, 52); // 12 bits
7209
7210 __ add(dest1, zr, rscratch1, Assembler::LSR, 12); // 14 bits
7211 __ add(dest1, dest1, rscratch2, Assembler::LSL, 14); // 26 bits
7212 __ ldr(rscratch1, Address(src, 4 * sizeof (jlong)));
7213 __ add(dest1, dest1, rscratch1, Assembler::LSL, 40); // 24 bits
7214
7215 if (dest2->is_valid()) {
7216 __ add(dest2, zr, rscratch1, Assembler::LSR, 24); // 2 bits
7217 } else {
7218 #ifdef ASSERT
7219 Label OK;
7220 __ cmp(zr, rscratch1, Assembler::LSR, 24); // 2 bits
8449 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
8450 StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
8451 }
8452
8453 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
8454 StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
8455 }
8456
8457 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
8458 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
8459 StubRoutines::_hf2f = generate_float16ToFloat();
8460 StubRoutines::_f2hf = generate_floatToFloat16();
8461 }
8462 }
8463
  // Generate and register the stubs required by (Loom) continuations:
  // the frame-thaw stub, the normal and exceptional return barriers, and
  // the preemption-support stubs.
  void generate_continuation_stubs() {
    // Continuation stubs:
    StubRoutines::_cont_thaw = generate_cont_thaw();
    StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
    StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
    StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();
    StubRoutines::_cont_preempt_monitorenter_redo = generate_cont_preempt_monitorenter_redo();
    StubRoutines::_cont_preempt_rerun_compiler_adapter = generate_cont_preempt_rerun_compiler_adapter();

    // JFR has continuation-related stubs of its own when it is built in.
    JFR_ONLY(generate_jfr_stubs();)
  }
8475
#if INCLUDE_JFR
  // Generate the JFR (Java Flight Recorder) stubs and cache each stub's
  // entry point alongside the stub object for direct use by generated code.
  void generate_jfr_stubs() {
    StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
    StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
    StubRoutines::_jfr_return_lease_stub = generate_jfr_return_lease();
    StubRoutines::_jfr_return_lease = StubRoutines::_jfr_return_lease_stub->entry_point();
  }
#endif // INCLUDE_JFR
8484
8485 void generate_final_stubs() {
8486 // support for verify_oop (must happen after universe_init)
8487 if (VerifyOops) {
8488 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
8489 }
8490 StubRoutines::_throw_AbstractMethodError_entry =
8491 generate_throw_exception("AbstractMethodError throw_exception",
|