    // TODO: will probably need multiple return barriers depending on return type
    StubCodeMark mark(this, "StubRoutines", "cont return barrier");
    address start = __ pc();

    generate_cont_thaw(Continuation::thaw_return_barrier);

    return start;
  }

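  // Exception flavour of the continuation return barrier: taken when an
  // exception is in flight at the barrier, so thawing goes through
  // Continuation::thaw_return_barrier_exception.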
  address generate_cont_returnBarrier_exception() {
    if (!Continuations::enabled()) return nullptr;

    StubCodeMark mark(this, "StubRoutines", "cont return barrier exception handler");
    address start = __ pc();

    generate_cont_thaw(Continuation::thaw_return_barrier_exception);

    return start;
  }

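  // Stub executed when a virtual thread is preempted: it clears the thread's
  // preemption state and either pops the enterSpecial frame and returns to
  // Continuation.run(), or, if the preemption was cancelled, jumps back to the
  // thaw call so the continuation resumes where it left off.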
  address generate_cont_preempt_stub() {
    if (!Continuations::enabled()) return nullptr;
    StubCodeMark mark(this, "StubRoutines", "Continuation preempt stub");
    address start = __ pc();

    __ reset_last_Java_frame(true);

    // Reset the preempting flag
    __ sb(zr, Address(xthread, JavaThread::preempting_offset()));

    // Set sp to enterSpecial frame and then remove it from the stack
    __ ld(sp, Address(xthread, JavaThread::cont_entry_offset()));

    Label preemption_cancelled;
    __ lbu(t0, Address(xthread, JavaThread::preemption_cancelled_offset()));
    __ bnez(t0, preemption_cancelled);

    // Remove enterSpecial frame from the stack and return to Continuation.run()
    SharedRuntime::continuation_enter_cleanup(_masm);
    __ leave();
    __ ret();

    __ bind(preemption_cancelled);
    __ sb(zr, Address(xthread, JavaThread::preemption_cancelled_offset()));
    __ la(fp, Address(sp, checked_cast<int32_t>(ContinuationEntry::size() + 2 * wordSize)));
    __ la(t0, ExternalAddress(ContinuationEntry::thaw_call_pc_address()));
    __ ld(t0, Address(t0));
    __ jr(t0);

    return start;
  }

#if COMPILER2_OR_JVMCI

#undef __
#define __ this->

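  // Generator for the SHA-256/SHA-512 intrinsic stubs. Both entry points share a
  // single vector implementation parameterized by element width: e32 selects the
  // SHA-256 flavour, e64 the SHA-512 flavour.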
  class Sha2Generator : public MacroAssembler {
    StubCodeGenerator* _cgen;
   public:
    Sha2Generator(MacroAssembler* masm, StubCodeGenerator* cgen) : MacroAssembler(masm->code()), _cgen(cgen) {}
    address generate_sha256_implCompress(bool multi_block) {
      return generate_sha2_implCompress(Assembler::e32, multi_block);
    }
    address generate_sha512_implCompress(bool multi_block) {
      return generate_sha2_implCompress(Assembler::e64, multi_block);
    }
   private:

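    // Unit-stride vector load helper: emits vle32.v for the e32 (SHA-256) case
    // and vle64.v for the e64 (SHA-512) case.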
    void vleXX_v(Assembler::SEW vset_sew, VectorRegister vr, Register sr) {
      if (vset_sew == Assembler::e32) __ vle32_v(vr, sr);
      else __ vle64_v(vr, sr);

  // ...

    }

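    // The call stub is the VM's entry point for invoking Java code from C++;
    // the catch-exception entry is where control lands when an exception
    // unwinds back into the call stub.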
    StubRoutines::_call_stub_entry =
      generate_call_stub(StubRoutines::_call_stub_return_address);

    // is referenced by megamorphic call sites
    StubRoutines::_catch_exception_entry = generate_catch_exception();

    if (UseCRC32Intrinsics) {
      // Set the table address before generating the stub that uses it
      StubRoutines::_crc_table_adr = (address)StubRoutines::riscv::_crc_table;
      StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
    }
  }

  void generate_continuation_stubs() {
    // Continuation stubs: thaw, return barriers and preemption support
    StubRoutines::_cont_thaw = generate_cont_thaw();
    StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
    StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
    StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();
  }

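  // Stubs that can only be generated once the rest of the VM has been
  // initialized (e.g. verify_oop needs the universe to be set up).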
  void generate_final_stubs() {
    // support for verify_oop (must happen after universe_init)
    if (VerifyOops) {
      StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
    }

    // arraycopy stubs used by compilers
    generate_arraycopy_stubs();

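    // The nmethod entry barrier stub is only needed when the selected GC's
    // barrier set provides a BarrierSetNMethod (used, for example, to arm
    // nmethods for class unloading).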
    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
    if (bs_nm != nullptr) {
      StubRoutines::_method_entry_barrier = generate_method_entry_barrier();
    }

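    // C2-only: stubs backing the hashed secondary-supers (subtype) check. The
    // slow-path stub handles lookups that miss in the hashed table and fall
    // back to a linear scan of the secondary supers array.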
#ifdef COMPILER2
    if (UseSecondarySupersTable) {
      StubRoutines::_lookup_secondary_supers_table_slow_path_stub = generate_lookup_secondary_supers_table_slow_path_stub();
      if (!InlineSecondarySupersTest) {