    address start = __ pc();

    __ resolve_global_jobject(j_rarg0, t0, t1);
    // Load target method from receiver
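    // (Illustrative note on the chain below, following the standard
    // java.lang.invoke linkage layout: MethodHandle.form is a LambdaForm,
    // LambdaForm.vmentry is a MemberName, MemberName.method is a
    // ResolvedMethodName, and ResolvedMethodName.vmtarget holds the raw
    // Method* that is finally invoked.)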
    __ load_heap_oop(xmethod, Address(j_rarg0, java_lang_invoke_MethodHandle::form_offset()), t0, t1);
    __ load_heap_oop(xmethod, Address(xmethod, java_lang_invoke_LambdaForm::vmentry_offset()), t0, t1);
    __ load_heap_oop(xmethod, Address(xmethod, java_lang_invoke_MemberName::method_offset()), t0, t1);
    __ access_load_at(T_ADDRESS, IN_HEAP, xmethod,
                      Address(xmethod, java_lang_invoke_ResolvedMethodName::vmtarget_offset()),
                      noreg, noreg);
    __ sd(xmethod, Address(xthread, JavaThread::callee_target_offset())); // just in case callee is deoptimized

    __ ret();

    return start;
  }

#undef __

  // Initialization
  void generate_preuniverse_stubs() {
    // Pre-universe stubs (stubs needed before universe initialization) are not needed on riscv.
  }

  void generate_initial_stubs() {
    // Generates the initial stubs and initializes the entry points.

    // Entry points that exist on all platforms. Note: this is code
    // that could be shared among different platforms - however, the
    // benefit seems to be smaller than the disadvantage of having a
    // much more complicated generator structure. See also the comment
    // in stubRoutines.hpp.

    StubRoutines::_forward_exception_entry = generate_forward_exception();

    if (UnsafeMemoryAccess::_table == nullptr) {
      UnsafeMemoryAccess::create_table(8 + 4); // 8 entries for copyMemory; 4 for setMemory
    }
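
    // (Note: the table records the PC ranges of the unsafe copy/set stubs
    // so that a fault taken inside one of them - e.g. a SIGBUS from a
    // truncated memory-mapped file - can be redirected to the stub's
    // error exit instead of crashing the VM.)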

    StubRoutines::_call_stub_entry =
      generate_call_stub(StubRoutines::_call_stub_return_address);

    // This entry is referenced by megamorphic calls.
    StubRoutines::_catch_exception_entry = generate_catch_exception();

    if (UseCRC32Intrinsics) {
      StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
    }

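    // (Note: _hf2f and _f2hf back Float.float16ToFloat and
    // Float.floatToFloat16, i.e. IEEE 754 binary16 <-> binary32
    // conversions; they are only installed when both Java intrinsics
    // are available.)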
    if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
        vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
      StubRoutines::_hf2f = generate_float16ToFloat();
      StubRoutines::_f2hf = generate_floatToFloat16();
    }
  }
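
  // For reference, a minimal hypothetical sketch (illustration only, not
  // used by the generator) of the function the updateBytesCRC32 stub
  // computes: CRC-32 over the reflected IEEE polynomial 0xEDB88320,
  // matching java.util.zip.CRC32. The generated stub computes the same
  // result with a much faster encoding.
  static uint32_t crc32_reference_sketch(uint32_t crc, const uint8_t* buf, size_t len) {
    crc = ~crc;                        // CRC-32 pre-inverts the running value
    for (size_t i = 0; i < len; i++) {
      crc ^= buf[i];
      for (int b = 0; b < 8; b++) {    // fold in one bit at a time
        crc = (crc >> 1) ^ (0xEDB88320u & (0u - (crc & 1u)));
      }
    }
    return ~crc;                       // ... and post-inverts it again
  }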

  void generate_continuation_stubs() {
    // Continuation stubs:
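    // (Note: these stubs support Loom virtual threads. cont_thaw copies
    // frozen frames from a heap StackChunk back onto the stack; the
    // return-barrier variants resume thawing when execution returns into
    // a still-frozen caller, without and with a pending exception.)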
    StubRoutines::_cont_thaw = generate_cont_thaw();
    StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
    StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
    StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();
  }

  void generate_final_stubs() {
    // support for verify_oop (must happen after universe_init)

    // ... (elided in this excerpt: the remainder of generate_final_stubs
    // and, presumably, the start of generate_compiler_stubs, whose
    // COMPILER2-guarded tail continues below) ...

    if (UseBASE64Intrinsics) {
      StubRoutines::_base64_encodeBlock = generate_base64_encodeBlock();
      StubRoutines::_base64_decodeBlock = generate_base64_decodeBlock();
    }

    if (UseAdler32Intrinsics) {
      StubRoutines::_updateBytesAdler32 = generate_updateBytesAdler32();
    }

    generate_compare_long_strings();

    generate_string_indexof_stubs();
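
    // (Note: these generate the stubs backing C2's long-string compare and
    // String.indexOf intrinsics.)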

#endif // COMPILER2
  }

 public:
  StubGenerator(CodeBuffer* code, StubGenBlobId blob_id) : StubCodeGenerator(code, blob_id) {
    switch (blob_id) {
    case preuniverse_id:
      generate_preuniverse_stubs();
      break;
    case initial_id:
      generate_initial_stubs();
      break;
    case continuation_id:
      generate_continuation_stubs();
      break;
    case compiler_id:
      generate_compiler_stubs();
      break;
    case final_id:
      generate_final_stubs();
      break;
    default:
      fatal("unexpected blob id: %d", blob_id);
      break;
    };
  }
}; // end class declaration

void StubGenerator_generate(CodeBuffer* code, StubGenBlobId blob_id) {