< prev index next > src/hotspot/share/code/aotCodeCache.cpp
Print this page
// Disable stubs caching until JDK-8357398 is fixed.
FLAG_SET_ERGO(AOTStubCaching, false);
if (VerifyOops) {
- // Disable AOT stubs caching when VerifyOops flag is on.
+ // Disable AOT stub caching when VerifyOops flag is on.
// Verify oops code generated a lot of C strings which overflow
// AOT C string table (which has fixed size).
// AOT C string table will be reworked later to handle such cases.
- //
- // Note: AOT adapters are not affected - they don't have oop operations.
- log_info(aot, codecache, init)("AOT Stubs Caching is not supported with VerifyOops.");
+ log_info(aot, codecache, init)("AOT Stub Caching is not supported with VerifyOops.");
FLAG_SET_ERGO(AOTStubCaching, false);
+ if (InlineTypePassFieldsAsArgs) {
+ log_info(aot, codecache, init)("AOT Adapter Caching is not supported with VerifyOops + InlineTypePassFieldsAsArgs.");
+ FLAG_SET_ERGO(AOTAdapterCaching, false);
+ }
}
bool is_dumping = false;
bool is_using = false;
if (CDSConfig::is_dumping_final_static_archive() && CDSConfig::is_dumping_aot_linked_classes()) {
log_debug(aot, codecache, init)("AOT Code Cache disabled: it was created with EnableJVMCI = %s vs current %s", (_enableJVMCI ? "true" : "false"), (EnableJVMCI ? "true" : "false"));
return false;
}
#endif // INCLUDE_JVMCI
- // The following checks do not affect AOT adapters caching
+ // The following checks do not affect AOT code, but can disable
+ // AOT stub/adapter caching if they are incompatible with runtime settings
+ // (adapters too, as they access oops when buffering scalarized value objects).
if (test_flag(compressedOops) != UseCompressedOops) {
- log_debug(aot, codecache, init)("AOT Code Cache disabled: it was created with UseCompressedOops = %s", UseCompressedOops ? "false" : "true");
+ log_debug(aot, codecache, init)("AOT Stub/Adapter Cache disabled: it was created with UseCompressedOops = %s", UseCompressedOops ? "false" : "true");
AOTStubCaching = false;
+ if (InlineTypePassFieldsAsArgs) {
+ AOTAdapterCaching = false;
+ }
}
if (_compressedOopShift != (uint)CompressedOops::shift()) {
- log_debug(aot, codecache, init)("AOT Code Cache disabled: it was created with different CompressedOops::shift(): %d vs current %d", _compressedOopShift, CompressedOops::shift());
+ log_debug(aot, codecache, init)("AOT Stub/Adapter Cache disabled: it was created with different CompressedOops::shift(): %d vs current %d", _compressedOopShift, CompressedOops::shift());
AOTStubCaching = false;
+ if (InlineTypePassFieldsAsArgs) {
+ AOTAdapterCaching = false;
+ }
}
- // This should be the last check as it only disables AOTStubCaching
+ // This should be the last check as it only disables AOTStub/AdapterCaching
if ((_compressedOopBase == nullptr || CompressedOops::base() == nullptr) && (_compressedOopBase != CompressedOops::base())) {
- log_debug(aot, codecache, init)("AOTStubCaching is disabled: incompatible CompressedOops::base(): %p vs current %p", _compressedOopBase, CompressedOops::base());
+ log_debug(aot, codecache, init)("AOT Stub/Adapter Cache disabled: incompatible CompressedOops::base(): %p vs current %p", _compressedOopBase, CompressedOops::base());
AOTStubCaching = false;
+ if (InlineTypePassFieldsAsArgs) {
+ AOTAdapterCaching = false;
+ }
}
return true;
}
// Record addresses of VM runtime methods
SET_ADDRESS(_extrs, SharedRuntime::fixup_callers_callsite);
SET_ADDRESS(_extrs, SharedRuntime::handle_wrong_method);
SET_ADDRESS(_extrs, SharedRuntime::handle_wrong_method_abstract);
SET_ADDRESS(_extrs, SharedRuntime::handle_wrong_method_ic_miss);
+ SET_ADDRESS(_extrs, SharedRuntime::allocate_inline_types);
#if defined(AARCH64) && !defined(ZERO)
SET_ADDRESS(_extrs, JavaThread::aarch64_get_thread_helper);
#endif
{
// Required by Shared blobs
SET_ADDRESS(_extrs, Runtime1::move_klass_patching);
SET_ADDRESS(_extrs, Runtime1::move_mirror_patching);
SET_ADDRESS(_extrs, Runtime1::move_appendix_patching);
SET_ADDRESS(_extrs, Runtime1::predicate_failed_trap);
SET_ADDRESS(_extrs, Runtime1::unimplemented_entry);
+ SET_ADDRESS(_extrs, Runtime1::new_null_free_array);
+ SET_ADDRESS(_extrs, Runtime1::load_flat_array);
+ SET_ADDRESS(_extrs, Runtime1::store_flat_array);
+ SET_ADDRESS(_extrs, Runtime1::substitutability_check);
+ SET_ADDRESS(_extrs, Runtime1::buffer_inline_args);
+ SET_ADDRESS(_extrs, Runtime1::buffer_inline_args_no_receiver);
+ SET_ADDRESS(_extrs, Runtime1::throw_identity_exception);
+ SET_ADDRESS(_extrs, Runtime1::throw_illegal_monitor_state_exception);
SET_ADDRESS(_extrs, Thread::current);
SET_ADDRESS(_extrs, CompressedKlassPointers::base_addr());
#ifndef PRODUCT
SET_ADDRESS(_extrs, os::breakpoint);
#endif
SET_ADDRESS(_extrs, OptoRuntime::monitor_notify_C);
SET_ADDRESS(_extrs, OptoRuntime::monitor_notifyAll_C);
SET_ADDRESS(_extrs, OptoRuntime::rethrow_C);
SET_ADDRESS(_extrs, OptoRuntime::slow_arraycopy_C);
SET_ADDRESS(_extrs, OptoRuntime::register_finalizer_C);
+ SET_ADDRESS(_extrs, OptoRuntime::load_unknown_inline_C);
+ SET_ADDRESS(_extrs, OptoRuntime::store_unknown_inline_C);
SET_ADDRESS(_extrs, OptoRuntime::vthread_end_first_transition_C);
SET_ADDRESS(_extrs, OptoRuntime::vthread_start_final_transition_C);
SET_ADDRESS(_extrs, OptoRuntime::vthread_start_transition_C);
SET_ADDRESS(_extrs, OptoRuntime::vthread_end_transition_C);
#if defined(AARCH64)
#if defined(AMD64) || defined(AARCH64) || defined(RISCV64)
SET_ADDRESS(_extrs, MacroAssembler::debug64);
#endif
#endif // ZERO
+ if (UseCompressedOops) {
+ SET_ADDRESS(_extrs, CompressedOops::base_addr());
+ }
+
// addresses of fields in AOT runtime constants area
address* p = AOTRuntimeConstants::field_addresses_list();
while (*p != nullptr) {
SET_ADDRESS(_extrs, *p++);
}
< prev index next >