src/hotspot/cpu/ppc/stubGenerator_ppc.cpp

4923     StubCodeMark mark(this, stub_id);
4924     address start = __ pc();
4925 
4926     __ resolve_global_jobject(R3_ARG1, R22_tmp2, R23_tmp3, MacroAssembler::PRESERVATION_FRAME_LR_GP_FP_REGS);
4927     // Load target method from receiver
4928     __ load_heap_oop(R19_method, java_lang_invoke_MethodHandle::form_offset(), R3_ARG1,
4929                      R22_tmp2, R23_tmp3, MacroAssembler::PRESERVATION_FRAME_LR_GP_FP_REGS, IS_NOT_NULL);
4930     __ load_heap_oop(R19_method, java_lang_invoke_LambdaForm::vmentry_offset(), R19_method,
4931                      R22_tmp2, R23_tmp3, MacroAssembler::PRESERVATION_FRAME_LR_GP_FP_REGS, IS_NOT_NULL);
4932     __ load_heap_oop(R19_method, java_lang_invoke_MemberName::method_offset(), R19_method,
4933                      R22_tmp2, R23_tmp3, MacroAssembler::PRESERVATION_FRAME_LR_GP_FP_REGS, IS_NOT_NULL);
4934     __ ld(R19_method, java_lang_invoke_ResolvedMethodName::vmtarget_offset(), R19_method);
4935     __ std(R19_method, in_bytes(JavaThread::callee_target_offset()), R16_thread); // just in case callee is deoptimized
4936 
4937     __ blr();
4938 
4939     return start;
4940   }
4941 
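The stub above appears to resolve the concrete callee from a MethodHandle receiver: it converts the global JNI handle in R3_ARG1 back to an oop, then follows MethodHandle.form -> LambdaForm.vmentry -> MemberName.method -> ResolvedMethodName.vmtarget, and finally parks the resulting Method* in JavaThread::callee_target so the callee can still be found if it is deoptimized. A minimal stand-alone sketch of that dereference chain, using hypothetical stand-in structs rather than the real HotSpot or java.lang.invoke types:

#include <cstdio>

// Stand-ins for the java.lang.invoke objects; only the field names and the
// order of the loads mirror the stub above.
struct ResolvedMethodName { const char* vmtarget; };       // would be a Method*
struct MemberName         { ResolvedMethodName* method; };
struct LambdaForm         { MemberName* vmentry; };
struct MethodHandle       { LambdaForm* form; };

// The four loads the stub performs: receiver->form->vmentry->method->vmtarget.
static const char* load_target(const MethodHandle* receiver) {
  return receiver->form->vmentry->method->vmtarget;
}

int main() {
  ResolvedMethodName rmn{"Method* of the bound target"};
  MemberName         mn{&rmn};
  LambdaForm         lf{&mn};
  MethodHandle       mh{&lf};
  std::printf("%s\n", load_target(&mh));   // prints the simulated vmtarget
  return 0;
}

The three load_heap_oop calls correspond to the first three hops (oop loads that go through the GC barrier machinery), while the final ld is a plain load, consistent with vmtarget holding a Method* rather than a heap oop.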
4942   // Initialization
4943   void generate_preuniverse_stubs() {
4944     // preuniverse stubs are not needed for ppc
4945   }
4946 
4947   void generate_initial_stubs() {
4948     // Generates all stubs and initializes the entry points
4949 
4950     // Entry points that exist in all platforms.
4951     // Note: This is code that could be shared among different platforms - however the
4952     // benefit seems to be smaller than the disadvantage of having a
4953     // much more complicated generator structure. See also comment in
4954     // stubRoutines.hpp.
4955 
4956     StubRoutines::_forward_exception_entry          = generate_forward_exception();
4957     StubRoutines::_call_stub_entry                  = generate_call_stub(StubRoutines::_call_stub_return_address);
4958     StubRoutines::_catch_exception_entry            = generate_catch_exception();
4959 
4960     if (UnsafeMemoryAccess::_table == nullptr) {
4961       UnsafeMemoryAccess::create_table(8 + 4); // 8 for copyMemory; 4 for setMemory
4962     }
4963 
4964     // CRC32 Intrinsics.
4965     if (UseCRC32Intrinsics) {
4966       StubRoutines::_crc_table_adr = StubRoutines::ppc::generate_crc_constants(REVERSE_CRC32_POLY);

5056       StubRoutines::_sha256_implCompressMB = generate_sha256_implCompress(StubGenStubId::sha256_implCompressMB_id);
5057     }
5058     if (UseSHA512Intrinsics) {
5059       StubRoutines::_sha512_implCompress   = generate_sha512_implCompress(StubGenStubId::sha512_implCompress_id);
5060       StubRoutines::_sha512_implCompressMB = generate_sha512_implCompress(StubGenStubId::sha512_implCompressMB_id);
5061     }
5062 
5063 #ifdef VM_LITTLE_ENDIAN
5064     // Currently supported on PPC64LE only
5065     if (UseBASE64Intrinsics) {
5066       StubRoutines::_base64_decodeBlock = generate_base64_decodeBlock();
5067       StubRoutines::_base64_encodeBlock = generate_base64_encodeBlock();
5068     }
5069 #endif
5070 #endif // COMPILER2_OR_JVMCI
5071   }
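The UnsafeMemoryAccess::create_table(8 + 4) call above in generate_initial_stubs sizes a table of fault-handling entries; the source comment accounts for 8 copyMemory and 4 setMemory entries. As a rough, hypothetical sketch of the idea (the names below are stand-ins, not the HotSpot API): each entry covers a PC range inside a stub that may fault, and a memory fault whose PC falls in a registered range is resumed at that entry's error exit.

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for the kind of table create_table(8 + 4) reserves:
// one (start, end, error_exit) record per stub region that may fault.
struct FaultRegion { uintptr_t start, end, error_exit; };

constexpr int kMaxRegions = 8 + 4;   // 8 for copyMemory; 4 for setMemory
static FaultRegion regions[kMaxRegions];
static int         region_count = 0;

static void register_region(uintptr_t start, uintptr_t end, uintptr_t error_exit) {
  if (region_count < kMaxRegions) regions[region_count++] = {start, end, error_exit};
}

// A fault inside a registered region is resumed at that region's error exit;
// anything else is not an unsafe-access fault and is left alone (returns 0).
static uintptr_t continuation_for(uintptr_t fault_pc) {
  for (int i = 0; i < region_count; i++) {
    if (fault_pc >= regions[i].start && fault_pc < regions[i].end) {
      return regions[i].error_exit;
    }
  }
  return 0;
}

int main() {
  register_region(0x1000, 0x1080, 0x1100);                         // e.g. a copyMemory stub
  std::printf("%#lx\n", (unsigned long)continuation_for(0x1040));  // prints 0x1100
  return 0;
}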
5072 
5073  public:
5074   StubGenerator(CodeBuffer* code, StubGenBlobId blob_id) : StubCodeGenerator(code, blob_id) {
5075     switch(blob_id) {
5076     case preuniverse_id:
5077       generate_preuniverse_stubs();
5078       break;
5079     case initial_id:
5080       generate_initial_stubs();
5081       break;
5082     case continuation_id:
5083       generate_continuation_stubs();
5084       break;
5085     case compiler_id:
5086       generate_compiler_stubs();
5087       break;
5088     case final_id:
5089       generate_final_stubs();
5090       break;
5091     default:
5092       fatal("unexpected blob id: %d", blob_id);
5093       break;
5094     };
5095   }
5096 };
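The constructor above selects which set of stubs to emit from the blob id, which lets the VM generate each blob (preuniverse, initial, continuation, compiler, final) at a different point during startup. A minimal, stand-alone sketch of that dispatch pattern; the enum and class below are illustrative stand-ins, not HotSpot types:

#include <cstdio>
#include <cstdlib>
#include <initializer_list>

// Illustrative stand-ins: one generator object per blob, selected by id.
enum BlobId { preuniverse_id, initial_id, continuation_id, compiler_id, final_id };

struct Generator {
  explicit Generator(BlobId blob_id) {
    switch (blob_id) {
    case preuniverse_id:  std::puts("generate preuniverse stubs");  break;
    case initial_id:      std::puts("generate initial stubs");      break;
    case continuation_id: std::puts("generate continuation stubs"); break;
    case compiler_id:     std::puts("generate compiler stubs");     break;
    case final_id:        std::puts("generate final stubs");        break;
    default:              std::abort();                             // unexpected blob id
    }
  }
};

int main() {
  // Startup would construct one generator per phase, in order.
  for (BlobId id : {preuniverse_id, initial_id, continuation_id, compiler_id, final_id}) {
    Generator g(id);
  }
  return 0;
}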
5097 
5098 void StubGenerator_generate(CodeBuffer* code, StubGenBlobId blob_id) {