3848 // TODO: will probably need multiple return barriers depending on return type
3849 StubCodeMark mark(this, "StubRoutines", "cont return barrier");
3850 address start = __ pc();
3851
3852 generate_cont_thaw(Continuation::thaw_return_barrier);
3853
3854 return start;
3855 }
3856
3857 address generate_cont_returnBarrier_exception() {
3858 if (!Continuations::enabled()) return nullptr;
3859
3860 StubCodeMark mark(this, "StubRoutines", "cont return barrier exception handler");
3861 address start = __ pc();
3862
3863 generate_cont_thaw(Continuation::thaw_return_barrier_exception);
3864
3865 return start;
3866 }
3867
3868 #if COMPILER2_OR_JVMCI
3869
3870 #undef __
3871 #define __ this->
3872
3873 class Sha2Generator : public MacroAssembler {
3874 StubCodeGenerator* _cgen;
3875 public:
// Emits into the same code buffer as the enclosing generator's assembler;
// _cgen is retained for stub bookkeeping (presumably StubCodeMark
// registration — confirm against the uses below).
Sha2Generator(MacroAssembler* masm, StubCodeGenerator* cgen) : MacroAssembler(masm->code()), _cgen(cgen) {}
// SHA-256 compression stub: shared SHA-2 generator with 32-bit vector
// element width (Assembler::e32).  multi_block selects the multi-block
// variant of the stub.
address generate_sha256_implCompress(bool multi_block) {
  return generate_sha2_implCompress(Assembler::e32, multi_block);
}
// SHA-512 compression stub: shared SHA-2 generator with 64-bit vector
// element width (Assembler::e64).  multi_block selects the multi-block
// variant of the stub.
address generate_sha512_implCompress(bool multi_block) {
  return generate_sha2_implCompress(Assembler::e64, multi_block);
}
3883 private:
3884
3885 void vleXX_v(Assembler::SEW vset_sew, VectorRegister vr, Register sr) {
3886 if (vset_sew == Assembler::e32) __ vle32_v(vr, sr);
3887 else __ vle64_v(vr, sr);
6140 }
6141
6142 StubRoutines::_call_stub_entry =
6143 generate_call_stub(StubRoutines::_call_stub_return_address);
6144
6145 // is referenced by megamorphic call
6146 StubRoutines::_catch_exception_entry = generate_catch_exception();
6147
6148 if (UseCRC32Intrinsics) {
6149 // set table address before stub generation which use it
6150 StubRoutines::_crc_table_adr = (address)StubRoutines::riscv::_crc_table;
6151 StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
6152 }
6153 }
6154
// Generates the stubs supporting continuations and publishes their entry
// points in StubRoutines.  Each generator returns nullptr when
// continuations are disabled, leaving the corresponding entry unset.
void generate_continuation_stubs() {
  // Continuation stubs:
  StubRoutines::_cont_thaw = generate_cont_thaw();
  StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
  StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
}
6161
6162 void generate_final_stubs() {
6163 // support for verify_oop (must happen after universe_init)
6164 if (VerifyOops) {
6165 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
6166 }
6167
6168 // arraycopy stubs used by compilers
6169 generate_arraycopy_stubs();
6170
6171 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
6172 if (bs_nm != nullptr) {
6173 StubRoutines::_method_entry_barrier = generate_method_entry_barrier();
6174 }
6175
6176 #ifdef COMPILER2
6177 if (UseSecondarySupersTable) {
6178 StubRoutines::_lookup_secondary_supers_table_slow_path_stub = generate_lookup_secondary_supers_table_slow_path_stub();
6179 if (!InlineSecondarySupersTest) {
|
3848 // TODO: will probably need multiple return barriers depending on return type
3849 StubCodeMark mark(this, "StubRoutines", "cont return barrier");
3850 address start = __ pc();
3851
3852 generate_cont_thaw(Continuation::thaw_return_barrier);
3853
3854 return start;
3855 }
3856
// Return-barrier variant taken on the exception path (kind
// Continuation::thaw_return_barrier_exception); the stub body is emitted
// by generate_cont_thaw.  Returns nullptr (no stub) when continuations
// are disabled.
address generate_cont_returnBarrier_exception() {
  if (!Continuations::enabled()) return nullptr;

  StubCodeMark mark(this, "StubRoutines", "cont return barrier exception handler");
  address start = __ pc();  // stub entry point = current emission pc

  generate_cont_thaw(Continuation::thaw_return_barrier_exception);

  return start;
}
3867
// Stub entered after a continuation was preempted.  Clears the thread's
// preempting flag, repositions sp at the enterSpecial frame (from the
// thread's cont_entry), and then either returns to Continuation.run()
// or — if preemption was cancelled — re-enters the thaw path via the
// saved thaw-call pc.  Returns nullptr when continuations are disabled.
address generate_cont_preempt_stub() {
  if (!Continuations::enabled()) return nullptr;
  StubCodeMark mark(this, "StubRoutines","Continuation preempt stub");
  address start = __ pc();

  // We arrive here via a jump, not a call: clear the last Java frame state.
  __ reset_last_Java_frame(true);

  // reset the flag
  __ sb(zr, Address(xthread, JavaThread::preempting_offset()));

  // Set sp to enterSpecial frame and then remove it from the stack
  __ ld(sp, Address(xthread, JavaThread::cont_entry_offset()));

  Label preemption_cancelled;
  __ lbu(t0, Address(xthread, JavaThread::preemption_cancelled_offset()));
  __ bnez(t0, preemption_cancelled);

  // Remove enterSpecial frame from the stack and return to Continuation.run()
  SharedRuntime::continuation_enter_cleanup(_masm);
  __ leave();
  __ ret();

  // Preemption was cancelled: clear the cancellation flag, rebuild fp to
  // point above the ContinuationEntry (entry size plus two words —
  // presumably the saved ra/fp pair, confirm against the frame layout),
  // then reload the recorded thaw-call pc and jump to it.
  __ bind(preemption_cancelled);
  __ sb(zr, Address(xthread, JavaThread::preemption_cancelled_offset()));
  __ la(fp, Address(sp, checked_cast<int32_t>(ContinuationEntry::size() + 2 * wordSize)));
  __ la(t0, ExternalAddress(ContinuationEntry::thaw_call_pc_address()));
  __ ld(t0, Address(t0));
  __ jr(t0);

  return start;
}
3899
3900 #if COMPILER2_OR_JVMCI
3901
3902 #undef __
3903 #define __ this->
3904
3905 class Sha2Generator : public MacroAssembler {
3906 StubCodeGenerator* _cgen;
3907 public:
// Shares the enclosing generator's code buffer; _cgen is kept for stub
// bookkeeping (presumably StubCodeMark registration — confirm).
Sha2Generator(MacroAssembler* masm, StubCodeGenerator* cgen) : MacroAssembler(masm->code()), _cgen(cgen) {}
// SHA-256 compression stub: 32-bit vector element width (Assembler::e32);
// multi_block selects the multi-block variant.
address generate_sha256_implCompress(bool multi_block) {
  return generate_sha2_implCompress(Assembler::e32, multi_block);
}
// SHA-512 compression stub: 64-bit vector element width (Assembler::e64);
// multi_block selects the multi-block variant.
address generate_sha512_implCompress(bool multi_block) {
  return generate_sha2_implCompress(Assembler::e64, multi_block);
}
3915 private:
3916
3917 void vleXX_v(Assembler::SEW vset_sew, VectorRegister vr, Register sr) {
3918 if (vset_sew == Assembler::e32) __ vle32_v(vr, sr);
3919 else __ vle64_v(vr, sr);
6172 }
6173
6174 StubRoutines::_call_stub_entry =
6175 generate_call_stub(StubRoutines::_call_stub_return_address);
6176
6177 // is referenced by megamorphic call
6178 StubRoutines::_catch_exception_entry = generate_catch_exception();
6179
6180 if (UseCRC32Intrinsics) {
6181 // set table address before stub generation which use it
6182 StubRoutines::_crc_table_adr = (address)StubRoutines::riscv::_crc_table;
6183 StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
6184 }
6185 }
6186
// Generates the stubs supporting continuations — thaw, the two return
// barriers (normal and exception path), and the preempt stub — and
// publishes their entry points in StubRoutines.  Each generator returns
// nullptr when continuations are disabled.
void generate_continuation_stubs() {
  // Continuation stubs:
  StubRoutines::_cont_thaw = generate_cont_thaw();
  StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
  StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
  StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();
}
6194
6195 void generate_final_stubs() {
6196 // support for verify_oop (must happen after universe_init)
6197 if (VerifyOops) {
6198 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
6199 }
6200
6201 // arraycopy stubs used by compilers
6202 generate_arraycopy_stubs();
6203
6204 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
6205 if (bs_nm != nullptr) {
6206 StubRoutines::_method_entry_barrier = generate_method_entry_barrier();
6207 }
6208
6209 #ifdef COMPILER2
6210 if (UseSecondarySupersTable) {
6211 StubRoutines::_lookup_secondary_supers_table_slow_path_stub = generate_lookup_secondary_supers_table_slow_path_stub();
6212 if (!InlineSecondarySupersTest) {
|