src/hotspot/cpu/arm/stubGenerator_arm.cpp

=== old version ===

3109     if (!Continuations::enabled()) return nullptr;
3110     Unimplemented();
3111     return nullptr;
3112   }
3113 
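       // the three entries below share one generator and differ only in the stub id passed to generate_cont_thaw(StubGenStubId)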
3114   address generate_cont_thaw() {
3115     return generate_cont_thaw(StubGenStubId::cont_thaw_id);
3116   }
3117 
3118   address generate_cont_returnBarrier() {
3119     return generate_cont_thaw(StubGenStubId::cont_returnBarrier_id);
3120   }
3121 
3122   address generate_cont_returnBarrier_exception() {
3123     return generate_cont_thaw(StubGenStubId::cont_returnBarrierExc_id);
3124   }
3125 
3126   //---------------------------------------------------------------------------
3127   // Initialization
3128 
3129   void generate_initial_stubs() {
3130     // Generate the initial stubs and initialize their entry points
3131 
3132     //------------------------------------------------------------------------------------------------------------------------
3133     // entry points that exist on all platforms
3134     // Note: This is code that could be shared among different platforms - however the benefit seems to be smaller than
3135     //       the disadvantage of having a much more complicated generator structure. See also comment in stubRoutines.hpp.
3136     StubRoutines::_forward_exception_entry      = generate_forward_exception();
3137 
3138     StubRoutines::_call_stub_entry              =
3139       generate_call_stub(StubRoutines::_call_stub_return_address);
3140     // _catch_exception_entry is referenced from megamorphic call sites
3141     StubRoutines::_catch_exception_entry        = generate_catch_exception();
3142 
3143     // create the table of unsafe memory access entries used by the copyMemory/setMemory stubs
3144     if (UnsafeMemoryAccess::_table == nullptr) {
3145       UnsafeMemoryAccess::create_table(32 + 4); // 32 for copyMemory; 4 for setMemory
3146     }
3147 
3148     // integer division used both by interpreter and compiler
3149     StubRoutines::Arm::_idiv_irem_entry = generate_idiv_irem();
3150 
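         // atomic operation entry points; the 64-bit load/store variants use ARM-specific entries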
3151     StubRoutines::_atomic_add_entry = generate_atomic_add();
3152     StubRoutines::_atomic_xchg_entry = generate_atomic_xchg();
3153     StubRoutines::_atomic_cmpxchg_entry = generate_atomic_cmpxchg();
3154     StubRoutines::_atomic_cmpxchg_long_entry = generate_atomic_cmpxchg_long();
3155     StubRoutines::Arm::_atomic_load_long_entry = generate_atomic_load_long();
3156     StubRoutines::Arm::_atomic_store_long_entry = generate_atomic_store_long();
3157 
3158   }
3159 
3160   void generate_continuation_stubs() {
3161     // Continuation stubs:
3162     StubRoutines::_cont_thaw          = generate_cont_thaw();
3163     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
3164     StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
3165   }
3166 
3167   void generate_final_stubs() {
3168     // Generate the remaining stubs and initialize the entry points
3169 
3170     //------------------------------------------------------------------------------------------------------------------------
3171     // entry points that are platform specific
3172 
3173     // support for verify_oop (must happen after universe_init)
3174     StubRoutines::_verify_oop_subroutine_entry     = generate_verify_oop();
3175 
3176     // arraycopy stubs used by compilers
3177     generate_arraycopy_stubs();

3184     // Generate partial_subtype_check first here since its code depends on
3185     // UseZeroBaseCompressedOops which is defined after heap initialization.
3186     StubRoutines::Arm::_partial_subtype_check                = generate_partial_subtype_check();
3187 
3188 #ifdef COMPILE_CRYPTO
3189     // generate AES intrinsics code
3190     if (UseAESIntrinsics) {
3191       aes_init();
3192       StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
3193       StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
3194       StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
3195       StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt();
3196     }
3197 #endif // COMPILE_CRYPTO
3198 #endif // COMPILER2
3199   }
3200 
3201  public:
3202   StubGenerator(CodeBuffer* code, StubGenBlobId blob_id) : StubCodeGenerator(code, blob_id) {
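         // dispatch on the blob id: each id selects the group of stubs generated at that phase of VM startup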
3203     switch(blob_id) {
3204     case initial_id:
3205       generate_initial_stubs();
3206       break;
3207     case continuation_id:
3208       generate_continuation_stubs();
3209       break;
3210     case compiler_id:
3211       generate_compiler_stubs();
3212       break;
3213     case final_id:
3214       generate_final_stubs();
3215       break;
3216     default:
3217       fatal("unexpected blob id: %d", blob_id);
3218       break;
3219     }
3220   }
3221 }; // end class declaration
3222 
3223 void StubGenerator_generate(CodeBuffer* code, StubGenBlobId blob_id) {

=== new version ===

3109     if (!Continuations::enabled()) return nullptr;
3110     Unimplemented();
3111     return nullptr;
3112   }
3113 
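       // the three entries below share one generator and differ only in the stub id passed to generate_cont_thaw(StubGenStubId)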
3114   address generate_cont_thaw() {
3115     return generate_cont_thaw(StubGenStubId::cont_thaw_id);
3116   }
3117 
3118   address generate_cont_returnBarrier() {
3119     return generate_cont_thaw(StubGenStubId::cont_returnBarrier_id);
3120   }
3121 
3122   address generate_cont_returnBarrier_exception() {
3123     return generate_cont_thaw(StubGenStubId::cont_returnBarrierExc_id);
3124   }
3125 
3126   //---------------------------------------------------------------------------
3127   // Initialization
3128 
3129   void generate_preuniverse_stubs() {
3130     // Atomics are used in universe initialization code (e.g. CDS relocation),
3131     // therefore we need to generate real stubs very early on.
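         // note: the 64-bit load/store variants use ARM-specific entries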
3132     StubRoutines::_atomic_add_entry = generate_atomic_add();
3133     StubRoutines::_atomic_xchg_entry = generate_atomic_xchg();
3134     StubRoutines::_atomic_cmpxchg_entry = generate_atomic_cmpxchg();
3135     StubRoutines::_atomic_cmpxchg_long_entry = generate_atomic_cmpxchg_long();
3136     StubRoutines::Arm::_atomic_load_long_entry = generate_atomic_load_long();
3137     StubRoutines::Arm::_atomic_store_long_entry = generate_atomic_store_long();
3138   }
3139 
3140   void generate_initial_stubs() {
3141     // Generate the initial stubs and initialize their entry points
3142 
3143     //------------------------------------------------------------------------------------------------------------------------
3144     // entry points that exist on all platforms
3145     // Note: This is code that could be shared among different platforms - however the benefit seems to be smaller than
3146     //       the disadvantage of having a much more complicated generator structure. See also comment in stubRoutines.hpp.
3147     StubRoutines::_forward_exception_entry      = generate_forward_exception();
3148 
3149     StubRoutines::_call_stub_entry              =
3150       generate_call_stub(StubRoutines::_call_stub_return_address);
3151     // _catch_exception_entry is referenced from megamorphic call sites
3152     StubRoutines::_catch_exception_entry        = generate_catch_exception();
3153 
3154     // create the table of unsafe memory access entries used by the copyMemory/setMemory stubs
3155     if (UnsafeMemoryAccess::_table == nullptr) {
3156       UnsafeMemoryAccess::create_table(32 + 4); // 32 for copyMemory; 4 for setMemory
3157     }
3158 
3159     // integer division used both by interpreter and compiler
3160     StubRoutines::Arm::_idiv_irem_entry = generate_idiv_irem();
3161   }
3162 
3163   void generate_continuation_stubs() {
3164     // Continuation stubs:
3165     StubRoutines::_cont_thaw          = generate_cont_thaw();
3166     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
3167     StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
3168   }
3169 
3170   void generate_final_stubs() {
3171     // Generate the remaining stubs and initialize the entry points
3172 
3173     //------------------------------------------------------------------------------------------------------------------------
3174     // entry points that are platform specific
3175 
3176     // support for verify_oop (must happen after universe_init)
3177     StubRoutines::_verify_oop_subroutine_entry     = generate_verify_oop();
3178 
3179     // arraycopy stubs used by compilers
3180     generate_arraycopy_stubs();

3187     // Generate partial_subtype_check first here since its code depends on
3188     // UseZeroBaseCompressedOops which is defined after heap initialization.
3189     StubRoutines::Arm::_partial_subtype_check                = generate_partial_subtype_check();
3190 
3191 #ifdef COMPILE_CRYPTO
3192     // generate AES intrinsics code
3193     if (UseAESIntrinsics) {
3194       aes_init();
3195       StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
3196       StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
3197       StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
3198       StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt();
3199     }
3200 #endif // COMPILE_CRYPTO
3201 #endif // COMPILER2
3202   }
3203 
3204  public:
3205   StubGenerator(CodeBuffer* code, StubGenBlobId blob_id) : StubCodeGenerator(code, blob_id) {
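         // dispatch on the blob id: each id selects the group of stubs generated at that phase of VM startup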
3206     switch(blob_id) {
3207     case preuniverse_id:
3208       generate_preuniverse_stubs();
3209       break;
3210     case initial_id:
3211       generate_initial_stubs();
3212       break;
3213     case continuation_id:
3214       generate_continuation_stubs();
3215       break;
3216     case compiler_id:
3217       generate_compiler_stubs();
3218       break;
3219     case final_id:
3220       generate_final_stubs();
3221       break;
3222     default:
3223       fatal("unexpected blob id: %d", blob_id);
3224       break;
3225     }
3226   }
3227 }; // end class declaration
3228 
3229 void StubGenerator_generate(CodeBuffer* code, StubGenBlobId blob_id) {