6643 address start = __ pc();
6644
6645 __ resolve_global_jobject(j_rarg0, t0, t1);
6646 // Load target method from receiver
6647 __ load_heap_oop(xmethod, Address(j_rarg0, java_lang_invoke_MethodHandle::form_offset()), t0, t1);
6648 __ load_heap_oop(xmethod, Address(xmethod, java_lang_invoke_LambdaForm::vmentry_offset()), t0, t1);
6649 __ load_heap_oop(xmethod, Address(xmethod, java_lang_invoke_MemberName::method_offset()), t0, t1);
6650 __ access_load_at(T_ADDRESS, IN_HEAP, xmethod,
6651 Address(xmethod, java_lang_invoke_ResolvedMethodName::vmtarget_offset()),
6652 noreg, noreg);
6653 __ sd(xmethod, Address(xthread, JavaThread::callee_target_offset())); // just in case callee is deoptimized
6654
6655 __ ret();
6656
6657 return start;
6658 }
6659
6660 #undef __
6661
6662 // Initialization
6663 void generate_initial_stubs() {
6664 // Generate initial stubs and initializes the entry points
6665
6666 // entry points that exist in all platforms Note: This is code
6667 // that could be shared among different platforms - however the
6668 // benefit seems to be smaller than the disadvantage of having a
6669 // much more complicated generator structure. See also comment in
6670 // stubRoutines.hpp.
6671
// Stub that hands a pending exception back to the caller's handler;
// generated first so later stubs can rely on it.
6672 StubRoutines::_forward_exception_entry = generate_forward_exception();
6673
// Lazily create the table recording unsafe-memory-access stub PCs,
// sized for the copyMemory and setMemory stub families (see comment below).
6674 if (UnsafeMemoryAccess::_table == nullptr) {
6675 UnsafeMemoryAccess::create_table(8 + 4); // 8 for copyMemory; 4 for setMemory
6676 }
6677
// Entry point used by the VM to call into generated Java code; the
// return address inside the stub is captured in _call_stub_return_address.
6678 StubRoutines::_call_stub_entry =
6679 generate_call_stub(StubRoutines::_call_stub_return_address);
6680
6681 // is referenced by megamorphic call
6682 StubRoutines::_catch_exception_entry = generate_catch_exception();
// NOTE(review): original lines 6683..6797 are elided from this excerpt;
// additional stub generation occurs between these two statements.
6798
// Optional intrinsic stubs, generated only when the corresponding VM
// flag is enabled.
6799 if (UseBASE64Intrinsics) {
6800 StubRoutines::_base64_encodeBlock = generate_base64_encodeBlock();
6801 StubRoutines::_base64_decodeBlock = generate_base64_decodeBlock();
6802 }
6803
6804 if (UseAdler32Intrinsics) {
6805 StubRoutines::_updateBytesAdler32 = generate_updateBytesAdler32();
6806 }
6807
// Helper stubs presumably used by the string compare / indexOf
// intrinsics -- TODO confirm against their definitions.
6808 generate_compare_long_strings();
6809
6810 generate_string_indexof_stubs();
6811
// NOTE(review): this #endif pairs with an #ifdef COMPILER2 that lies
// outside the visible excerpt (likely guarding part of the elided code).
6812 #endif // COMPILER2
6813 }
6814
6815 public:
6816 StubGenerator(CodeBuffer* code, StubGenBlobId blob_id) : StubCodeGenerator(code, blob_id) {
6817 switch(blob_id) {
6818 case initial_id:
6819 generate_initial_stubs();
6820 break;
6821 case continuation_id:
6822 generate_continuation_stubs();
6823 break;
6824 case compiler_id:
6825 generate_compiler_stubs();
6826 break;
6827 case final_id:
6828 generate_final_stubs();
6829 break;
6830 default:
6831 fatal("unexpected blob id: %d", blob_id);
6832 break;
6833 };
6834 }
6835 }; // end class declaration
6836
6837 void StubGenerator_generate(CodeBuffer* code, StubGenBlobId blob_id) {
|
6643 address start = __ pc();
6644
6645 __ resolve_global_jobject(j_rarg0, t0, t1);
6646 // Load target method from receiver
6647 __ load_heap_oop(xmethod, Address(j_rarg0, java_lang_invoke_MethodHandle::form_offset()), t0, t1);
6648 __ load_heap_oop(xmethod, Address(xmethod, java_lang_invoke_LambdaForm::vmentry_offset()), t0, t1);
6649 __ load_heap_oop(xmethod, Address(xmethod, java_lang_invoke_MemberName::method_offset()), t0, t1);
6650 __ access_load_at(T_ADDRESS, IN_HEAP, xmethod,
6651 Address(xmethod, java_lang_invoke_ResolvedMethodName::vmtarget_offset()),
6652 noreg, noreg);
6653 __ sd(xmethod, Address(xthread, JavaThread::callee_target_offset())); // just in case callee is deoptimized
6654
6655 __ ret();
6656
6657 return start;
6658 }
6659
6660 #undef __
6661
6662 // Initialization
// Pre-universe phase: intentionally empty -- this platform (riscv)
// has no stubs that must be generated in this phase.
void generate_preuniverse_stubs() {
}
6666
6667 void generate_initial_stubs() {
6668 // Generate initial stubs and initializes the entry points
6669
6670 // entry points that exist in all platforms Note: This is code
6671 // that could be shared among different platforms - however the
6672 // benefit seems to be smaller than the disadvantage of having a
6673 // much more complicated generator structure. See also comment in
6674 // stubRoutines.hpp.
6675
// Stub that hands a pending exception back to the caller's handler;
// generated first so later stubs can rely on it.
6676 StubRoutines::_forward_exception_entry = generate_forward_exception();
6677
// Lazily create the table recording unsafe-memory-access stub PCs,
// sized for the copyMemory and setMemory stub families (see comment below).
6678 if (UnsafeMemoryAccess::_table == nullptr) {
6679 UnsafeMemoryAccess::create_table(8 + 4); // 8 for copyMemory; 4 for setMemory
6680 }
6681
// Entry point used by the VM to call into generated Java code; the
// return address inside the stub is captured in _call_stub_return_address.
6682 StubRoutines::_call_stub_entry =
6683 generate_call_stub(StubRoutines::_call_stub_return_address);
6684
6685 // is referenced by megamorphic call
6686 StubRoutines::_catch_exception_entry = generate_catch_exception();
// NOTE(review): original lines 6687..6801 are elided from this excerpt;
// additional stub generation occurs between these two statements.
6802
// Optional intrinsic stubs, generated only when the corresponding VM
// flag is enabled.
6803 if (UseBASE64Intrinsics) {
6804 StubRoutines::_base64_encodeBlock = generate_base64_encodeBlock();
6805 StubRoutines::_base64_decodeBlock = generate_base64_decodeBlock();
6806 }
6807
6808 if (UseAdler32Intrinsics) {
6809 StubRoutines::_updateBytesAdler32 = generate_updateBytesAdler32();
6810 }
6811
// Helper stubs presumably used by the string compare / indexOf
// intrinsics -- TODO confirm against their definitions.
6812 generate_compare_long_strings();
6813
6814 generate_string_indexof_stubs();
6815
// NOTE(review): this #endif pairs with an #ifdef COMPILER2 that lies
// outside the visible excerpt (likely guarding part of the elided code).
6816 #endif // COMPILER2
6817 }
6818
6819 public:
6820 StubGenerator(CodeBuffer* code, StubGenBlobId blob_id) : StubCodeGenerator(code, blob_id) {
6821 switch(blob_id) {
6822 case preuniverse_id:
6823 generate_preuniverse_stubs();
6824 break;
6825 case initial_id:
6826 generate_initial_stubs();
6827 break;
6828 case continuation_id:
6829 generate_continuation_stubs();
6830 break;
6831 case compiler_id:
6832 generate_compiler_stubs();
6833 break;
6834 case final_id:
6835 generate_final_stubs();
6836 break;
6837 default:
6838 fatal("unexpected blob id: %d", blob_id);
6839 break;
6840 };
6841 }
6842 }; // end class declaration
6843
6844 void StubGenerator_generate(CodeBuffer* code, StubGenBlobId blob_id) {
|