src/hotspot/share/cds/metaspaceShared.cpp
* questions.
*
*/
#include "precompiled.hpp"
#include "cds/archiveBuilder.hpp"
#include "cds/archiveHeapLoader.hpp"
#include "cds/archiveHeapWriter.hpp"
#include "cds/cds_globals.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/cdsProtectionDomain.hpp"
#include "cds/cds_globals.hpp"
#include "cds/classListParser.hpp"
#include "cds/classListWriter.hpp"
- #include "cds/classPrelinker.hpp"
#include "cds/cppVtables.hpp"
#include "cds/dumpAllocStats.hpp"
#include "cds/dynamicArchive.hpp"
#include "cds/filemap.hpp"
#include "cds/heapShared.hpp"
#include "cds/lambdaFormInvokers.hpp"
#include "cds/metaspaceShared.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "classfile/classLoaderDataShared.hpp"
* questions.
*
*/
#include "precompiled.hpp"
+ #include "cds/aotClassInitializer.hpp"
+ #include "cds/aotClassLinker.hpp"
+ #include "cds/aotConstantPoolResolver.hpp"
+ #include "cds/aotLinkedClassBulkLoader.hpp"
#include "cds/archiveBuilder.hpp"
#include "cds/archiveHeapLoader.hpp"
#include "cds/archiveHeapWriter.hpp"
#include "cds/cds_globals.hpp"
+ #include "cds/cdsAccess.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/cdsProtectionDomain.hpp"
#include "cds/cds_globals.hpp"
#include "cds/classListParser.hpp"
#include "cds/classListWriter.hpp"
#include "cds/cppVtables.hpp"
#include "cds/dumpAllocStats.hpp"
#include "cds/dynamicArchive.hpp"
#include "cds/filemap.hpp"
+ #include "cds/finalImageRecipes.hpp"
#include "cds/heapShared.hpp"
#include "cds/lambdaFormInvokers.hpp"
#include "cds/metaspaceShared.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "classfile/classLoaderDataShared.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
+ #include "code/SCCache.hpp"
+ #include "compiler/compileBroker.hpp"
+ #include "compiler/precompiler.hpp"
#include "gc/shared/gcVMOperations.hpp"
#include "interpreter/bytecodeStream.hpp"
#include "interpreter/bytecodes.hpp"
#include "jvm_io.h"
#include "logging/log.hpp"
#include "memory/universe.hpp"
#include "nmt/memTracker.hpp"
#include "oops/compressedKlass.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/klass.inline.hpp"
+ #include "oops/method.inline.hpp"
#include "oops/objArrayOop.hpp"
#include "oops/oop.inline.hpp"
#include "oops/oopHandle.hpp"
+ #include "oops/trainingData.hpp"
#include "prims/jvmtiExport.hpp"
+ #include "prims/whitebox.hpp"
#include "runtime/arguments.hpp"
#include "runtime/globals.hpp"
#include "runtime/globals_extension.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaCalls.hpp"
bool MetaspaceShared::_archive_loading_failed = false;
bool MetaspaceShared::_remapped_readwrite = false;
void* MetaspaceShared::_shared_metaspace_static_top = nullptr;
intx MetaspaceShared::_relocation_delta;
char* MetaspaceShared::_requested_base_address;
+ Array<Method*>* MetaspaceShared::_archived_method_handle_intrinsics = nullptr;
bool MetaspaceShared::_use_optimized_module_handling = true;
// The CDS archive is divided into the following regions:
// rw - read-write metadata
// ro - read-only metadata and read-only tables
return aligned_base;
}
void MetaspaceShared::initialize_for_static_dump() {
assert(CDSConfig::is_dumping_static_archive(), "sanity");
+
+ if (CDSConfig::is_dumping_preimage_static_archive() || CDSConfig::is_dumping_final_static_archive()) {
+ if (!((UseG1GC || UseParallelGC || UseSerialGC || UseEpsilonGC || UseShenandoahGC) && UseCompressedClassPointers)) {
+ vm_exit_during_initialization("Cannot create the CacheDataStore",
+ "UseCompressedClassPointers must be enabled, and collector must be G1, Parallel, Serial, Epsilon, or Shenandoah");
+ }
+ }
+
log_info(cds)("Core region alignment: " SIZE_FORMAT, core_region_alignment());
// The max allowed size for CDS archive. We use this to limit SharedBaseAddress
// to avoid address space wrap around.
size_t cds_max;
const size_t reserve_alignment = core_region_alignment();
}
}
static GrowableArrayCHeap<OopHandle, mtClassShared>* _extra_interned_strings = nullptr;
static GrowableArrayCHeap<Symbol*, mtClassShared>* _extra_symbols = nullptr;
+ static GrowableArray<Method*>* _method_handle_intrinsics = nullptr;
void MetaspaceShared::read_extra_data(JavaThread* current, const char* filename) {
_extra_interned_strings = new GrowableArrayCHeap<OopHandle, mtClassShared>(10000);
_extra_symbols = new GrowableArrayCHeap<Symbol*, mtClassShared>(1000);
}
}
}
}
+ void MetaspaceShared::make_method_handle_intrinsics_shareable() {
+ for (int i = 0; i < _method_handle_intrinsics->length(); i++) {
+ Method* m = ArchiveBuilder::current()->get_buffered_addr(_method_handle_intrinsics->at(i));
+ m->remove_unshareable_info();
+ // Each method has its own constant pool (which is distinct from m->method_holder()->constants());
+ m->constants()->remove_unshareable_info();
+ }
+ }
+
+ void MetaspaceShared::write_method_handle_intrinsics() {
+ int len = _method_handle_intrinsics->length();
+ _archived_method_handle_intrinsics = ArchiveBuilder::new_ro_array<Method*>(len);
+ for (int i = 0; i < len; i++) {
+ ArchiveBuilder::current()->write_pointer_in_buffer(_archived_method_handle_intrinsics->adr_at(i),
+ _method_handle_intrinsics->at(i));
+ }
+ log_info(cds)("Archived %d method handle intrinsics", len);
+ }
+
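
The array built here is written into the read-only region and registered as a serialized root (see serialize() below), so a run-time consumer only has to walk it after the archive is mapped. A minimal sketch, assuming a hypothetical logging helper that is not part of this change:

    // Hypothetical helper: walk the deserialized list and log each intrinsic.
    static void log_archived_mh_intrinsics(Array<Method*>* list) {
      if (list == nullptr) {
        return;
      }
      ResourceMark rm;
      for (int i = 0; i < list->length(); i++) {
        Method* m = list->at(i);
        log_debug(cds)("archived MH intrinsic: %s", m->name_and_sig_as_C_string());
      }
    }
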
// Read/write a data stream for restoring/preserving metadata pointers and
// miscellaneous data from/to the shared archive file.
void MetaspaceShared::serialize(SerializeClosure* soc) {
int tag = 0;
// Dump/restore the symbol/string/subgraph_info tables
SymbolTable::serialize_shared_table_header(soc);
StringTable::serialize_shared_table_header(soc);
HeapShared::serialize_tables(soc);
SystemDictionaryShared::serialize_dictionary_headers(soc);
!
InstanceMirrorKlass::serialize_offsets(soc);
// Dump/restore well known classes (pointers)
SystemDictionaryShared::serialize_vm_classes(soc);
soc->do_tag(--tag);
CDS_JAVA_HEAP_ONLY(Modules::serialize(soc);)
CDS_JAVA_HEAP_ONLY(ClassLoaderDataShared::serialize(soc);)
LambdaFormInvokers::serialize(soc);
soc->do_tag(666);
}
// Dump/restore the symbol/string/subgraph_info tables
SymbolTable::serialize_shared_table_header(soc);
StringTable::serialize_shared_table_header(soc);
HeapShared::serialize_tables(soc);
SystemDictionaryShared::serialize_dictionary_headers(soc);
! AOTLinkedClassBulkLoader::serialize(soc, true);
+ FinalImageRecipes::serialize(soc, true);
+ TrainingData::serialize_training_data(soc);
InstanceMirrorKlass::serialize_offsets(soc);
// Dump/restore well known classes (pointers)
SystemDictionaryShared::serialize_vm_classes(soc);
soc->do_tag(--tag);
CDS_JAVA_HEAP_ONLY(Modules::serialize(soc);)
CDS_JAVA_HEAP_ONLY(ClassLoaderDataShared::serialize(soc);)
+ soc->do_ptr((void**)&_archived_method_handle_intrinsics);
+
LambdaFormInvokers::serialize(soc);
soc->do_tag(666);
}
virtual void iterate_roots(MetaspaceClosure* it) {
FileMapInfo::metaspace_pointers_do(it);
SystemDictionaryShared::dumptime_classes_do(it);
Universe::metaspace_pointers_do(it);
vmSymbols::metaspace_pointers_do(it);
+ TrainingData::iterate_roots(it);
// The above code should find all the symbols that are referenced by the
// archived classes. We just need to add the extra symbols which
// may not be used by any of the archived classes -- these are usually
// symbols that we anticipate to be used at run time, so we can store
if (_extra_symbols != nullptr) {
for (int i = 0; i < _extra_symbols->length(); i++) {
it->push(_extra_symbols->adr_at(i));
}
}
+
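+ // Push the MH intrinsic Methods as roots so that ArchiveBuilder copies them (and
+ // their stand-alone constant pools) into the archive buffer.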
+ for (int i = 0; i < _method_handle_intrinsics->length(); i++) {
+ it->push(_method_handle_intrinsics->adr_at(i));
+ }
}
};
char* VM_PopulateDumpSharedSpace::dump_read_only_tables() {
ArchiveBuilder::OtherROAllocMark mark;
SystemDictionaryShared::write_to_archive();
+ AOTClassLinker::write_to_archive();
+ if (CDSConfig::is_dumping_preimage_static_archive()) {
+ FinalImageRecipes::record_recipes();
+ }
+ AOTLinkedClassBulkLoader::record_unregistered_classes();
+ TrainingData::dump_training_data();
+ MetaspaceShared::write_method_handle_intrinsics();
// Write lambda form invoker lines into the archive
LambdaFormInvokers::dump_static_archive_invokers();
// Write module name into archive
CDS_JAVA_HEAP_ONLY(Modules::dump_main_module_name();)
return start;
}
void VM_PopulateDumpSharedSpace::doit() {
! guarantee(!CDSConfig::is_using_archive(), "We should not be using an archive when we dump");
DEBUG_ONLY(SystemDictionaryShared::NoClassLoadingMark nclm);
FileMapInfo::check_nonempty_dir_in_shared_path_table();
NOT_PRODUCT(SystemDictionary::verify();)
// Block concurrent class unloading from changing the _dumptime_table
MutexLocker ml(DumpTimeTable_lock, Mutex::_no_safepoint_check_flag);
! SystemDictionaryShared::check_excluded_classes();
_builder.gather_source_objs();
_builder.reserve_buffer();
CppVtables::dumptime_init(&_builder);
return start;
}
void VM_PopulateDumpSharedSpace::doit() {
! //guarantee(!CDSConfig::is_using_archive(), "We should not be using an archive when we dump");
DEBUG_ONLY(SystemDictionaryShared::NoClassLoadingMark nclm);
+ _method_handle_intrinsics = new (mtClassShared) GrowableArray<Method*>(256, mtClassShared);
+ SystemDictionary::get_all_method_handle_intrinsics(_method_handle_intrinsics);
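+ // Sort by (holder, name, signature) so the intrinsics are archived in a stable,
+ // deterministic order that does not depend on the order in which they were created.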
+ _method_handle_intrinsics->sort([] (Method** a, Method** b) -> int {
+ Symbol* a_holder = (*a)->method_holder()->name();
+ Symbol* b_holder = (*b)->method_holder()->name();
+ if (a_holder != b_holder) {
+ return a_holder->cmp(b_holder);
+ }
+ Symbol* a_name = (*a)->name();
+ Symbol* b_name = (*b)->name();
+ if (a_name != b_name) {
+ return a_name->cmp(b_name);
+ }
+ Symbol* a_signature = (*a)->signature();
+ Symbol* b_signature = (*b)->signature();
+ if (a_signature != b_signature) {
+ return a_signature->cmp(b_signature);
+ }
+ return 0;
+ });
+
FileMapInfo::check_nonempty_dir_in_shared_path_table();
NOT_PRODUCT(SystemDictionary::verify();)
// Block concurrent class unloading from changing the _dumptime_table
MutexLocker ml(DumpTimeTable_lock, Mutex::_no_safepoint_check_flag);
!
+ SystemDictionaryShared::find_all_archivable_classes();
_builder.gather_source_objs();
_builder.reserve_buffer();
CppVtables::dumptime_init(&_builder);
dump_java_heap_objects(_builder.klasses());
dump_shared_symbol_table(_builder.symbols());
log_info(cds)("Make classes shareable");
_builder.make_klasses_shareable();
char* serialized_data = dump_read_only_tables();
SystemDictionaryShared::adjust_lambda_proxy_class_dictionary();
// The vtable clones contain addresses of the current process.
// We don't want to write these addresses into the archive.
CppVtables::zero_archived_vtables();
// Write the archive file
! const char* static_archive = CDSConfig::static_archive_path();
assert(static_archive != nullptr, "SharedArchiveFile not set?");
_map_info = new FileMapInfo(static_archive, true);
_map_info->populate_header(MetaspaceShared::core_region_alignment());
_map_info->set_serialized_data(serialized_data);
_map_info->set_cloned_vtables(CppVtables::vtables_serialized_base());
dump_java_heap_objects(_builder.klasses());
dump_shared_symbol_table(_builder.symbols());
log_info(cds)("Make classes shareable");
_builder.make_klasses_shareable();
+ MetaspaceShared::make_method_handle_intrinsics_shareable();
char* serialized_data = dump_read_only_tables();
SystemDictionaryShared::adjust_lambda_proxy_class_dictionary();
+ log_info(cds)("Make training data shareable");
+ _builder.make_training_data_shareable();
+
// The vtable clones contain addresses of the current process.
// We don't want to write these addresses into the archive.
CppVtables::zero_archived_vtables();
// Write the archive file
! const char* static_archive;
+ if (CDSConfig::is_dumping_final_static_archive()) {
+ static_archive = CacheDataStore;
+ assert(FileMapInfo::current_info() != nullptr, "sanity");
+ delete FileMapInfo::current_info();
+ } else {
+ static_archive = CDSConfig::static_archive_path();
+ }
assert(static_archive != nullptr, "SharedArchiveFile not set?");
_map_info = new FileMapInfo(static_archive, true);
_map_info->populate_header(MetaspaceShared::core_region_alignment());
_map_info->set_serialized_data(serialized_data);
_map_info->set_cloned_vtables(CppVtables::vtables_serialized_base());
};
// Check if we can eagerly link this class at dump time, so we can avoid the
// runtime linking overhead (especially verification)
bool MetaspaceShared::may_be_eagerly_linked(InstanceKlass* ik) {
+ if (CDSConfig::preserve_all_dumptime_verification_states(ik)) {
+ assert(ik->can_be_verified_at_dumptime(), "sanity");
+ }
if (!ik->can_be_verified_at_dumptime()) {
// For old classes, try to leave them in the unlinked state, so
// we can still store them in the archive. They must be
// linked/verified at runtime.
return false;
}
+
if (CDSConfig::is_dumping_dynamic_archive() && ik->is_shared_unregistered_class()) {
// Linking of unregistered classes at this stage may cause more
// classes to be resolved, resulting in calls to ClassLoader.loadClass()
// that may not be expected by custom class loaders.
//
return false;
}
return true;
}
- bool MetaspaceShared::link_class_for_cds(InstanceKlass* ik, TRAPS) {
- // Link the class to cause the bytecodes to be rewritten and the
- // cpcache to be created. Class verification is done according
- // to -Xverify setting.
- bool res = MetaspaceShared::try_link_class(THREAD, ik);
- ClassPrelinker::dumptime_resolve_constants(ik, CHECK_(false));
- return res;
- }
-
void MetaspaceShared::link_shared_classes(bool jcmd_request, TRAPS) {
! ClassPrelinker::initialize();
!
! if (!jcmd_request) {
LambdaFormInvokers::regenerate_holder_classes(CHECK);
}
// Collect all loaded ClassLoaderData.
CollectCLDClosure collect_cld(THREAD);
return false;
}
return true;
}
void MetaspaceShared::link_shared_classes(bool jcmd_request, TRAPS) {
! AOTClassLinker::initialize();
!
! if (!jcmd_request && !CDSConfig::is_dumping_dynamic_archive()
+ && !CDSConfig::is_dumping_preimage_static_archive()
+ && !CDSConfig::is_dumping_final_static_archive()) {
+ // Regenerated invoker classes in the dynamic archive would conflict with the resolved
+ // CONSTANT_Klass references that are stored in the static archive. This is not easy
+ // to handle, so regeneration is disabled for dynamic archives for now.
LambdaFormInvokers::regenerate_holder_classes(CHECK);
}
// Collect all loaded ClassLoaderData.
CollectCLDClosure collect_cld(THREAD);
ClassLoaderData* cld = collect_cld.cld_at(i);
for (Klass* klass = cld->klasses(); klass != nullptr; klass = klass->next_link()) {
if (klass->is_instance_klass()) {
InstanceKlass* ik = InstanceKlass::cast(klass);
if (may_be_eagerly_linked(ik)) {
! has_linked |= link_class_for_cds(ik, CHECK);
}
}
}
}
ClassLoaderData* cld = collect_cld.cld_at(i);
for (Klass* klass = cld->klasses(); klass != nullptr; klass = klass->next_link()) {
if (klass->is_instance_klass()) {
InstanceKlass* ik = InstanceKlass::cast(klass);
if (may_be_eagerly_linked(ik)) {
! has_linked |= try_link_class(THREAD, ik);
+ }
+ if (CDSConfig::is_dumping_heap() && ik->is_linked() && !ik->is_initialized()) {
+ AOTClassInitializer::maybe_preinit_class(ik, CHECK);
}
}
}
}
break;
}
// Class linking includes verification which may load more classes.
// Keep scanning until we have linked no more classes.
}
+
+ // Resolve constant pool entries -- we don't load any new classes during this stage
+ for (int i = 0; i < collect_cld.nof_cld(); i++) {
+ ClassLoaderData* cld = collect_cld.cld_at(i);
+ for (Klass* klass = cld->klasses(); klass != nullptr; klass = klass->next_link()) {
+ if (klass->is_instance_klass()) {
+ InstanceKlass* ik = InstanceKlass::cast(klass);
+ AOTConstantPoolResolver::dumptime_resolve_constants(ik, CHECK);
+ if (CDSConfig::is_dumping_preimage_static_archive()) {
+ FinalImageRecipes::add_reflection_data_flags(ik, CHECK);
+ }
+ }
+ }
+ }
+
+ if (CDSConfig::is_dumping_preimage_static_archive()) {
+ // Do this after all classes are verified by the above loop.
+ // Any classes loaded from here on will be automatically excluded, so
+ // there's no need to force verification or resolve CP entries.
+ RecordTraining = false;
+ SystemDictionaryShared::ignore_new_classes();
+ LambdaFormInvokers::regenerate_holder_classes(CHECK);
+ RecordTraining = true;
+ }
+
+ if (CDSConfig::is_dumping_final_static_archive()) {
+ FinalImageRecipes::apply_recipes(CHECK);
+ }
}
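
In outline, dump-time linking is now a two-pass process: classes are linked and verified repeatedly until no status changes (verification may load more classes), and only then are constant pool entries resolved, which must not load anything new. A condensed sketch, where for_each_loaded_instance_klass() is a stand-in for the ClassLoaderData iteration above:

    bool progress = true;
    while (progress) {                // verification may load more classes,
      progress = false;               // so repeat until nothing changes
      for_each_loaded_instance_klass([&](InstanceKlass* ik) {
        if (MetaspaceShared::may_be_eagerly_linked(ik)) {
          progress |= MetaspaceShared::try_link_class(THREAD, ik);
        }
      });
    }
    // ...followed by AOTConstantPoolResolver::dumptime_resolve_constants() over the same
    // classes, plus the preimage/final-image specific steps shown above.
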
void MetaspaceShared::prepare_for_dumping() {
assert(CDSConfig::is_dumping_archive(), "sanity");
CDSConfig::check_unsupported_dumping_module_options();
// Preload classes from a list, populate the shared spaces and dump to a
// file.
void MetaspaceShared::preload_and_dump(TRAPS) {
ResourceMark rm(THREAD);
+ HandleMark hm(THREAD);
+
+ if (CDSConfig::is_dumping_final_static_archive() && PrintTrainingInfo) {
+ tty->print_cr("==================== archived_training_data ** before dumping ====================");
+ TrainingData::print_archived_training_data_on(tty);
+ }
+
StaticArchiveBuilder builder;
preload_and_dump_impl(builder, THREAD);
if (HAS_PENDING_EXCEPTION) {
if (PENDING_EXCEPTION->is_a(vmClasses::OutOfMemoryError_klass())) {
log_error(cds)("Out of memory. Please run with a larger Java heap, current MaxHeapSize = "
log_info(cds)("Loading classes to share: done.");
}
void MetaspaceShared::preload_and_dump_impl(StaticArchiveBuilder& builder, TRAPS) {
! preload_classes(CHECK);
! if (SharedArchiveConfigFile) {
! log_info(cds)("Reading extra data from %s ...", SharedArchiveConfigFile);
! read_extra_data(THREAD, SharedArchiveConfigFile);
! log_info(cds)("Reading extra data: done.");
}
// Rewrite and link classes
log_info(cds)("Rewriting and linking classes ...");
-
// Link any classes which got missed. This would happen if we have loaded classes that
// were not explicitly specified in the classlist. E.g., if an interface implemented by class K
// fails verification, all other interfaces that were not specified in the classlist but
// are implemented by K are not verified.
link_shared_classes(false/*not from jcmd*/, CHECK);
log_info(cds)("Rewriting and linking classes: done");
#if INCLUDE_CDS_JAVA_HEAP
if (CDSConfig::is_dumping_heap()) {
if (!HeapShared::is_archived_boot_layer_available(THREAD)) {
log_info(cds)("archivedBootLayer not available, disabling full module graph");
CDSConfig::stop_dumping_full_module_graph();
log_info(cds)("Loading classes to share: done.");
}
void MetaspaceShared::preload_and_dump_impl(StaticArchiveBuilder& builder, TRAPS) {
! if (CDSConfig::is_dumping_classic_static_archive()) {
+ // We are running with -Xshare:dump
+ preload_classes(CHECK);
+
+ if (SharedArchiveConfigFile) {
+ log_info(cds)("Reading extra data from %s ...", SharedArchiveConfigFile);
+ read_extra_data(THREAD, SharedArchiveConfigFile);
+ log_info(cds)("Reading extra data: done.");
+ }
+ }
! if (CDSConfig::is_dumping_preimage_static_archive()) {
! log_info(cds)("Reading lambda form invokers of in JDK default classlist ...");
! char default_classlist[JVM_MAXPATHLEN];
! get_default_classlist(default_classlist, sizeof(default_classlist));
+ struct stat statbuf;
+ if (os::stat(default_classlist, &statbuf) == 0) {
+ ClassListParser::parse_classlist(default_classlist,
+ ClassListParser::_parse_lambda_forms_invokers_only, CHECK);
+ }
}
// Rewrite and link classes
log_info(cds)("Rewriting and linking classes ...");
// Link any classes which got missed. This would happen if we have loaded classes that
// were not explicitly specified in the classlist. E.g., if an interface implemented by class K
// fails verification, all other interfaces that were not specified in the classlist but
// are implemented by K are not verified.
link_shared_classes(false/*not from jcmd*/, CHECK);
log_info(cds)("Rewriting and linking classes: done");
+ if (CDSConfig::is_dumping_final_static_archive()) {
+ assert(RecordTraining == false, "must be");
+ RecordTraining = true;
+ }
+
+ TrainingData::init_dumptime_table(CHECK); // captures TrainingDataSetLocker
+
#if INCLUDE_CDS_JAVA_HEAP
if (CDSConfig::is_dumping_heap()) {
if (!HeapShared::is_archived_boot_layer_available(THREAD)) {
log_info(cds)("archivedBootLayer not available, disabling full module graph");
CDSConfig::stop_dumping_full_module_graph();
ArchiveHeapWriter::init();
if (CDSConfig::is_dumping_full_module_graph()) {
HeapShared::reset_archived_object_states(CHECK);
}
+ if (ArchiveLoaderLookupCache) {
+ SystemDictionaryShared::create_loader_positive_lookup_cache(CHECK);
+ }
+
+ if (CDSConfig::is_dumping_invokedynamic()) {
+ // This makes sure that the MethodType and MethodTypeForm tables won't be updated
+ // concurrently when we are saving their contents into a side table.
+ assert(CDSConfig::allow_only_single_java_thread(), "Required");
+
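+ // Invoke the static Java method java.lang.invoke.MethodType::createArchivedObjects()
+ // so the MethodType/MethodTypeForm cache contents are saved into archivable side tables.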
+ JavaValue result(T_VOID);
+ JavaCalls::call_static(&result, vmClasses::MethodType_klass(),
+ vmSymbols::createArchivedObjects(),
+ vmSymbols::void_method_signature(),
+ CHECK);
+ }
+
// Do this at the very end, when no Java code will be executed. Otherwise
// some new strings may be added to the intern table.
StringTable::allocate_shared_strings_array(CHECK);
} else {
log_info(cds)("Not dumping heap, reset CDSConfig::_is_using_optimized_module_handling");
path_string,
CHECK);
VM_PopulateDumpSharedSpace op(builder);
VMThread::execute(&op);
! if (!write_static_archive(&builder, op.map_info(), op.heap_info())) {
THROW_MSG(vmSymbols::java_io_IOException(), "Encountered error while dumping");
}
}
bool MetaspaceShared::write_static_archive(ArchiveBuilder* builder, FileMapInfo* map_info, ArchiveHeapInfo* heap_info) {
path_string,
CHECK);
VM_PopulateDumpSharedSpace op(builder);
VMThread::execute(&op);
+ FileMapInfo* mapinfo = op.map_info();
+ ArchiveHeapInfo* heap_info = op.heap_info();
+ bool status;
+ if (CDSConfig::is_dumping_preimage_static_archive()) {
+ if ((status = write_static_archive(&builder, mapinfo, heap_info))) {
+ fork_and_dump_final_static_archive();
+ }
+ } else if (CDSConfig::is_dumping_final_static_archive()) {
+ RecordTraining = false;
+ if (StoreCachedCode && CachedCodeFile != nullptr) { // FIXME: new workflow -- remove the CachedCodeFile flag
+ if (log_is_enabled(Info, cds, jit)) {
+ CDSAccess::test_heap_access_api();
+ }
+
+ // We have just created the final image. Let's run the AOT compiler
+ if (PrintTrainingInfo) {
+ tty->print_cr("==================== archived_training_data ** after dumping ====================");
+ TrainingData::print_archived_training_data_on(tty);
+ }
+
+ CDSConfig::enable_dumping_cached_code();
+ {
+ builder.start_cc_region();
+ Precompiler::compile_cached_code(&builder, CHECK);
+ builder.end_cc_region();
+ }
+ CDSConfig::disable_dumping_cached_code();
! SCCache::close(); // Write final data and close archive
+ }
+ status = write_static_archive(&builder, mapinfo, heap_info);
+ } else {
+ status = write_static_archive(&builder, mapinfo, heap_info);
+ }
+
+ if (!status) {
THROW_MSG(vmSymbols::java_io_IOException(), "Encountered error while dumping");
}
}
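
The write step now branches on the dump mode; a simplified outline of the control flow above:

    if (CDSConfig::is_dumping_preimage_static_archive()) {
      // write the preimage, then fork a child JVM that re-runs with -XX:CDSPreimage=<file>
    } else if (CDSConfig::is_dumping_final_static_archive()) {
      // optionally AOT-compile into the "cc" region (Precompiler/SCCache), then write
    } else {
      // classic -Xshare:dump: just write the static archive
    }
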
bool MetaspaceShared::write_static_archive(ArchiveBuilder* builder, FileMapInfo* map_info, ArchiveHeapInfo* heap_info) {
"for testing purposes only and should not be used in a production environment");
}
return true;
}
// Returns true if the class's status has changed.
bool MetaspaceShared::try_link_class(JavaThread* current, InstanceKlass* ik) {
ExceptionMark em(current);
JavaThread* THREAD = current; // For exception macros.
assert(CDSConfig::is_dumping_archive(), "sanity");
! if (!ik->is_shared() && ik->is_loaded() && !ik->is_linked() && ik->can_be_verified_at_dumptime() &&
!SystemDictionaryShared::has_class_failed_verification(ik)) {
bool saved = BytecodeVerificationLocal;
if (ik->is_shared_unregistered_class() && ik->class_loader() == nullptr) {
// The verification decision is based on BytecodeVerificationRemote
// for non-system classes. Since we are using the null classloader
"for testing purposes only and should not be used in a production environment");
}
return true;
}
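+ // Prints the path of the java launcher of the current JDK, e.g. "/opt/jdk/bin/java"
+ // (the path shown is only an example; the real value comes from Arguments::get_java_home()).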
+ static void print_java_launcher(outputStream* st) {
+ st->print("%s%sbin%sjava", Arguments::get_java_home(), os::file_separator(), os::file_separator());
+ }
+
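+ // Reconstructs the VM options of the current process: the application class path (unless
+ // it is the default ".") followed by the recorded JVM flags and JVM arguments.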
+ static void print_vm_arguments(outputStream* st) {
+ const char* cp = Arguments::get_appclasspath();
+ if (cp != nullptr && strlen(cp) > 0 && strcmp(cp, ".") != 0) {
+ st->print(" -cp "); st->print_raw(cp);
+ }
+ for (int i = 0; i < Arguments::num_jvm_flags(); i++) {
+ st->print(" %s", Arguments::jvm_flags_array()[i]);
+ }
+ for (int i = 0; i < Arguments::num_jvm_args(); i++) {
+ st->print(" %s", Arguments::jvm_args_array()[i]);
+ }
+ }
+
+ void MetaspaceShared::fork_and_dump_final_static_archive() {
+ assert(CDSConfig::is_dumping_preimage_static_archive(), "sanity");
+
+ ResourceMark rm;
+ stringStream ss;
+ print_java_launcher(&ss);
+ print_vm_arguments(&ss);
+ ss.print(" -XX:CDSPreimage=%s", SharedArchiveFile);
+
+ const char* cmd = ss.freeze();
+ if (CDSManualFinalImage) {
+ tty->print_cr("-XX:+CDSManualFinalImage is specified");
+ tty->print_cr("Please manually execute the following command to create the final CDS image:");
+ tty->print(" "); tty->print_raw_cr(cmd);
+
+ // The following is useful if the dumping was triggered by a script that builds
+ // a complex command-line.
+ tty->print_cr("Note: to recreate the preimage only:");
+ tty->print_cr(" rm -f %s", CacheDataStore);
+ tty->print(" ");
+ print_java_launcher(tty);
+ print_vm_arguments(tty);
+ if (Arguments::java_command() != nullptr) {
+ tty->print(" %s", Arguments::java_command());
+ }
+ tty->cr();
+ } else {
+ // FIXME: space characters are not properly quoted. E.g.,
+ // java -Dfoo='a b c' HelloWorld
+ log_info(cds)("Launching child process to create final CDS image:");
+ log_info(cds)(" %s", cmd);
+ int status = os::fork_and_exec(cmd);
+ if (status != 0) {
+ log_error(cds)("Child process finished; status = %d", status);
+ log_error(cds)("To reproduce the error");
+ ResourceMark rm;
+ LogStream ls(Log(cds)::error());
+ ls.print(" "); ls.print_raw_cr(cmd);
+
+ // The following is useful if the dumping was triggered by a script that builds
+ // a complex command-line.
+ ls.print_cr("Note: to recreate the preimage only:");
+ ls.print_cr(" rm -f %s", CacheDataStore);
+ ls.print(" ");
+ print_java_launcher(&ls);
+ print_vm_arguments(&ls);
+ ls.print(" -XX:+UnlockDiagnosticVMOptions -XX:+CDSManualFinalImage");
+ if (Arguments::java_command() != nullptr) {
+ ls.print(" %s", Arguments::java_command());
+ }
+ ls.cr();
+
+ vm_direct_exit(status);
+ } else {
+ log_info(cds)("Child process finished; status = %d", status);
+ // On Windows, need WRITE permission to remove the file.
+ WINDOWS_ONLY(chmod(SharedArchiveFile, _S_IREAD | _S_IWRITE));
+ status = remove(SharedArchiveFile);
+ if (status != 0) {
+ log_error(cds)("Failed to remove CDSPreimage file %s", SharedArchiveFile);
+ } else {
+ log_info(cds)("Removed CDSPreimage file %s", SharedArchiveFile);
+ }
+ }
+ }
+ }
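
With hypothetical values (a JDK in /opt/jdk, an application class path of app.jar, and a parent JVM started with -XX:CacheDataStore=app.cds), the child command assembled above would look like:

    /opt/jdk/bin/java -cp app.jar -XX:CacheDataStore=app.cds -XX:CDSPreimage=<SharedArchiveFile>

Only the trailing -XX:CDSPreimage=<SharedArchiveFile> part is appended explicitly; everything else is echoed from print_java_launcher() and print_vm_arguments().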
+
// Returns true if the class's status has changed.
bool MetaspaceShared::try_link_class(JavaThread* current, InstanceKlass* ik) {
ExceptionMark em(current);
JavaThread* THREAD = current; // For exception macros.
assert(CDSConfig::is_dumping_archive(), "sanity");
!
+ if (ik->is_shared() && !CDSConfig::is_dumping_final_static_archive()) {
+ assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
+ return false;
+ }
+
+ if (ik->is_loaded() && !ik->is_linked() && ik->can_be_verified_at_dumptime() &&
!SystemDictionaryShared::has_class_failed_verification(ik)) {
bool saved = BytecodeVerificationLocal;
if (ik->is_shared_unregistered_class() && ik->class_loader() == nullptr) {
// The verification decision is based on BytecodeVerificationRemote
// for non-system classes. Since we are using the null classloader
}
#if INCLUDE_CDS_JAVA_HEAP
void VM_PopulateDumpSharedSpace::dump_java_heap_objects(GrowableArray<Klass*>* klasses) {
if(!HeapShared::can_write()) {
! log_info(cds)(
"Archived java heap is not supported as UseG1GC "
"and UseCompressedClassPointers are required."
"Current settings: UseG1GC=%s, UseCompressedClassPointers=%s.",
BOOL_TO_STR(UseG1GC), BOOL_TO_STR(UseCompressedClassPointers));
return;
}
// Find all the interned strings that should be dumped.
int i;
for (i = 0; i < klasses->length(); i++) {
}
#if INCLUDE_CDS_JAVA_HEAP
void VM_PopulateDumpSharedSpace::dump_java_heap_objects(GrowableArray<Klass*>* klasses) {
if(!HeapShared::can_write()) {
! if (!CDSConfig::is_dumping_preimage_static_archive()) {
+ log_info(cds)(
"Archived java heap is not supported as UseG1GC "
"and UseCompressedClassPointers are required."
"Current settings: UseG1GC=%s, UseCompressedClassPointers=%s.",
BOOL_TO_STR(UseG1GC), BOOL_TO_STR(UseCompressedClassPointers));
+ }
return;
}
// Find all the interned strings that should be dumped.
int i;
for (i = 0; i < klasses->length(); i++) {
void MetaspaceShared::initialize_runtime_shared_and_meta_spaces() {
assert(CDSConfig::is_using_archive(), "Must be called when UseSharedSpaces is enabled");
MapArchiveResult result = MAP_ARCHIVE_OTHER_FAILURE;
! FileMapInfo* static_mapinfo = open_static_archive();
FileMapInfo* dynamic_mapinfo = nullptr;
if (static_mapinfo != nullptr) {
log_info(cds)("Core region alignment: " SIZE_FORMAT, static_mapinfo->core_region_alignment());
dynamic_mapinfo = open_dynamic_archive();
void MetaspaceShared::initialize_runtime_shared_and_meta_spaces() {
assert(CDSConfig::is_using_archive(), "Must be called when UseSharedSpaces is enabled");
MapArchiveResult result = MAP_ARCHIVE_OTHER_FAILURE;
! FileMapInfo* static_mapinfo = FileMapInfo::current_info();
FileMapInfo* dynamic_mapinfo = nullptr;
if (static_mapinfo != nullptr) {
log_info(cds)("Core region alignment: " SIZE_FORMAT, static_mapinfo->core_region_alignment());
dynamic_mapinfo = open_dynamic_archive();
log_info(cds)("Unable to map shared spaces");
if (PrintSharedArchiveAndExit) {
MetaspaceShared::unrecoverable_loading_error("Unable to use shared archive.");
} else if (RequireSharedSpaces) {
MetaspaceShared::unrecoverable_loading_error("Unable to map shared spaces");
+ } else if (CDSConfig::is_dumping_final_static_archive()) {
+ assert(CDSPreimage != nullptr, "must be");
+ log_error(cds)("Unable to map shared spaces for CDSPreimage = %s", CDSPreimage);
+ MetaspaceShared::unrecoverable_loading_error();
}
}
// If mapping failed and -XShare:on, the vm should exit
bool has_failed = false;
if (dynamic_mapinfo != nullptr && !dynamic_mapinfo->is_mapped()) {
has_failed = true;
delete dynamic_mapinfo;
}
if (RequireSharedSpaces && has_failed) {
MetaspaceShared::unrecoverable_loading_error("Unable to map shared spaces");
}
}
! FileMapInfo* MetaspaceShared::open_static_archive() {
const char* static_archive = CDSConfig::static_archive_path();
assert(static_archive != nullptr, "sanity");
FileMapInfo* mapinfo = new FileMapInfo(static_archive, true);
if (!mapinfo->initialize()) {
delete(mapinfo);
! return nullptr;
}
- return mapinfo;
}
FileMapInfo* MetaspaceShared::open_dynamic_archive() {
if (CDSConfig::is_dumping_dynamic_archive()) {
return nullptr;
if (dynamic_mapinfo != nullptr && !dynamic_mapinfo->is_mapped()) {
has_failed = true;
delete dynamic_mapinfo;
}
if (RequireSharedSpaces && has_failed) {
+ // static archive mapped but dynamic archive failed
MetaspaceShared::unrecoverable_loading_error("Unable to map shared spaces");
}
}
! // This is called very early during VM startup to get the size of the cached_code region, which
+ // is used in CodeCache::initialize_heaps()
+ void MetaspaceShared::open_static_archive() {
+ if (!UseSharedSpaces) {
+ return;
+ }
const char* static_archive = CDSConfig::static_archive_path();
assert(static_archive != nullptr, "sanity");
FileMapInfo* mapinfo = new FileMapInfo(static_archive, true);
if (!mapinfo->initialize()) {
delete(mapinfo);
! } else {
+ FileMapRegion* r = mapinfo->region_at(MetaspaceShared::cc);
+ CDSAccess::set_cached_code_size(r->used_aligned());
}
}
FileMapInfo* MetaspaceShared::open_dynamic_archive() {
if (CDSConfig::is_dumping_dynamic_archive()) {
return nullptr;
assert(CDSConfig::is_using_archive(), "must be runtime");
if (mapinfo == nullptr) {
return MAP_ARCHIVE_SUCCESS; // The dynamic archive has not been specified. No error has happened -- trivially succeeded.
}
+ if (!mapinfo->validate_aot_class_linking()) {
+ return MAP_ARCHIVE_OTHER_FAILURE;
+ }
+
mapinfo->set_is_mapped(false);
if (mapinfo->core_region_alignment() != (size_t)core_region_alignment()) {
log_info(cds)("Unable to map CDS archive -- core_region_alignment() expected: " SIZE_FORMAT
" actual: " SIZE_FORMAT, mapinfo->core_region_alignment(), core_region_alignment());
return MAP_ARCHIVE_OTHER_FAILURE;
// Finish up archived heap initialization. These must be
// done after ReadClosure.
static_mapinfo->patch_heap_embedded_pointers();
ArchiveHeapLoader::finish_initialization();
Universe::load_archived_object_instances();
+ SCCache::new_workflow_load_cache();
// Close the mapinfo file
static_mapinfo->close();
static_mapinfo->unmap_region(MetaspaceShared::bm);
DynamicArchive::setup_array_klasses();
dynamic_mapinfo->close();
dynamic_mapinfo->unmap_region(MetaspaceShared::bm);
}
+ log_info(cds)("Using AOT-linked classes: %s (%s%s)",
+ CDSConfig::is_using_aot_linked_classes() ? "true" : "false",
+ static_mapinfo->header()->has_aot_linked_classes() ? "static archive: true" : "static archive: false",
+ (dynamic_mapinfo == nullptr) ? "" :
+ (dynamic_mapinfo->header()->has_aot_linked_classes() ? ", dynamic archive: true" : ", dynamic archive: false"));
+
// Set up LambdaFormInvokers::_lambdaform_lines for dynamic dump
if (CDSConfig::is_dumping_dynamic_archive()) {
// Read stored LF format lines stored in static archive
LambdaFormInvokers::read_static_archive_invokers();
}
if (dynamic_mapinfo != nullptr) {
tty->print_cr("\n\nDynamic archive name: %s", dynamic_mapinfo->full_path());
tty->print_cr("Dynamic archive version %d", dynamic_mapinfo->version());
SystemDictionaryShared::print_shared_archive(tty, false/*dynamic*/);
}
+ TrainingData::print_archived_training_data_on(tty);
+
+ if (LoadCachedCode) {
+ tty->print_cr("\n\nCached Code file: %s", CachedCodeFile);
+ SCCache::print_on(tty);
+ }
// collect shared symbols and strings
CountSharedSymbols cl;
SymbolTable::shared_symbols_do(&cl);
tty->print_cr("Number of shared symbols: %d", cl.total());