< prev index next >

src/hotspot/share/cds/heapShared.cpp

Print this page

  46 #include "classfile/javaClasses.inline.hpp"
  47 #include "classfile/modules.hpp"
  48 #include "classfile/stringTable.hpp"
  49 #include "classfile/symbolTable.hpp"
  50 #include "classfile/systemDictionary.hpp"
  51 #include "classfile/systemDictionaryShared.hpp"
  52 #include "classfile/vmClasses.hpp"
  53 #include "classfile/vmSymbols.hpp"
  54 #include "gc/shared/collectedHeap.hpp"
  55 #include "gc/shared/gcLocker.hpp"
  56 #include "gc/shared/gcVMOperations.hpp"
  57 #include "logging/log.hpp"
  58 #include "logging/logStream.hpp"
  59 #include "memory/iterator.inline.hpp"
  60 #include "memory/resourceArea.hpp"
  61 #include "memory/universe.hpp"
  62 #include "oops/compressedOops.inline.hpp"
  63 #include "oops/fieldStreams.inline.hpp"
  64 #include "oops/objArrayOop.inline.hpp"
  65 #include "oops/oop.inline.hpp"

  66 #include "oops/oopHandle.inline.hpp"
  67 #include "oops/typeArrayOop.inline.hpp"
  68 #include "prims/jvmtiExport.hpp"
  69 #include "runtime/arguments.hpp"
  70 #include "runtime/fieldDescriptor.inline.hpp"
  71 #include "runtime/globals_extension.hpp"
  72 #include "runtime/init.hpp"
  73 #include "runtime/javaCalls.hpp"
  74 #include "runtime/mutexLocker.hpp"
  75 #include "runtime/safepointVerifiers.hpp"
  76 #include "utilities/bitMap.inline.hpp"
  77 #include "utilities/copy.hpp"
  78 #if INCLUDE_G1GC
  79 #include "gc/g1/g1CollectedHeap.hpp"
  80 #endif
  81 
  82 #if INCLUDE_CDS_JAVA_HEAP
  83 
  84 struct ArchivableStaticFieldInfo {
  85   const char* klass_name;

 579     MutexLocker ml(ScratchObjects_lock, Mutex::_no_safepoint_check_flag);
 580     OopHandle* handle = get(ptr);
 581     if (handle != nullptr) {
 582       handle->release(Universe::vm_global());
 583       remove(ptr);
 584     }
 585   }
 586 };
 587 
 588 void HeapShared::add_scratch_resolved_references(ConstantPool* src, objArrayOop dest) {
 589   if (CDSConfig::is_dumping_preimage_static_archive() && scratch_resolved_references(src) != nullptr) {
 590     // We are in AOT training run. The class has been redefined and we are giving it a new resolved_reference.
 591     // Ignore it, as this class will be excluded from the AOT config.
 592     return;
 593   }
 594   if (SystemDictionaryShared::is_builtin_loader(src->pool_holder()->class_loader_data())) {
 595     _scratch_objects_table->set_oop(src, dest);
 596   }
 597 }
 598 
 599 objArrayOop HeapShared::scratch_resolved_references(ConstantPool* src) {
 600   return (objArrayOop)_scratch_objects_table->get_oop(src);

 601 }
 602 
 603  void HeapShared::init_dumping() {
 604    _scratch_objects_table = new (mtClass)MetaspaceObjToOopHandleTable();
 605    _pending_roots = new GrowableArrayCHeap<oop, mtClassShared>(500);
 606 }
 607 
 608 void HeapShared::init_scratch_objects_for_basic_type_mirrors(TRAPS) {
 609   for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
 610     BasicType bt = (BasicType)i;
 611     if (!is_reference_type(bt)) {
 612       oop m = java_lang_Class::create_basic_type_mirror(type2name(bt), bt, CHECK);
 613       _scratch_basic_type_mirrors[i] = OopHandle(Universe::vm_global(), m);
 614     }
 615   }
 616 }
 617 
 618 // Given java_mirror that represents a (primitive or reference) type T,
 619 // return the "scratch" version that represents the same type T. Note
 620 // that java_mirror will be returned if the mirror is already a scratch mirror.

 769     assert(success, "sanity");
 770   }
 771 
 772   if (log_is_enabled(Debug, aot, init)) {
 773     ResourceMark rm;
 774     log_debug(aot, init)("copied %3d field(s) in aot-initialized mirror %s%s%s", nfields, ik->external_name(),
 775                          ik->is_hidden() ? " (hidden)" : "",
 776                          ik->is_enum_subclass() ? " (enum)" : "");
 777   }
 778 }
 779 
// Copy dump-time state from orig_mirror into its scratch (to-be-archived) copy.
// This covers the identity hash (which must be preserved because it may already
// be baked into hashtables stored in the shared heap) and, when classes are
// AOT-linked, the mirror's module and protection_domain fields.
void HeapShared::copy_java_mirror(oop orig_mirror, oop scratch_m) {
  // We need to retain the identity_hash, because it may have been used by some hashtables
  // in the shared heap.
  if (!orig_mirror->fast_no_hash_check()) {
    intptr_t src_hash = orig_mirror->identity_hash();
    if (UseCompactObjectHeaders) {
      // With compact headers, the narrow Klass pointer lives in the mark word,
      // so the prototype mark must carry it alongside the copied hash.
      narrowKlass nk = CompressedKlassPointers::encode(orig_mirror->klass());
      scratch_m->set_mark(markWord::prototype().set_narrow_klass(nk).copy_set_hash(src_hash));
    } else {
      scratch_m->set_mark(markWord::prototype().copy_set_hash(src_hash));
    }
    assert(scratch_m->mark().is_unlocked(), "sanity");

    // Installing the hash via the mark word must produce the same identity hash
    // that the original mirror reported.
    DEBUG_ONLY(intptr_t archived_hash = scratch_m->identity_hash());
    assert(src_hash == archived_hash, "Different hash codes: original " INTPTR_FORMAT ", archived " INTPTR_FORMAT, src_hash, archived_hash);
  }

  if (CDSConfig::is_dumping_aot_linked_classes()) {
    // Presumably AOT-linked classes are restored without the usual runtime
    // fixups, so the archived mirror must already carry these fields —
    // NOTE(review): confirm against the restore path.
    java_lang_Class::set_module(scratch_m, java_lang_Class::module(orig_mirror));
    java_lang_Class::set_protection_domain(scratch_m, java_lang_Class::protection_domain(orig_mirror));
  }
}
 802 
 803 static objArrayOop get_archived_resolved_references(InstanceKlass* src_ik) {
 804   if (SystemDictionaryShared::is_builtin_loader(src_ik->class_loader_data())) {
 805     objArrayOop rr = src_ik->constants()->resolved_references_or_null();
 806     if (rr != nullptr && !HeapShared::is_too_large_to_archive(rr)) {
 807       return HeapShared::scratch_resolved_references(src_ik->constants());
 808     }
 809   }
 810   return nullptr;
 811 }
 812 
 813 int HeapShared::archive_exception_instance(oop exception) {
 814   bool success = archive_reachable_objects_from(1, _dump_time_special_subgraph, exception);
 815   assert(success, "sanity");
 816   return append_root(exception);

 992     // the original Klass*
 993     if (orig_k == vmClasses::String_klass() ||
 994         orig_k == vmClasses::Object_klass()) {
 995       // Initialized early during VM initialization. No need to be added
 996       // to the sub-graph object class list.
 997       return;
 998     }
 999     check_allowed_klass(InstanceKlass::cast(orig_k));
1000   } else if (orig_k->is_objArray_klass()) {
1001     Klass* abk = ObjArrayKlass::cast(orig_k)->bottom_klass();
1002     if (abk->is_instance_klass()) {
1003       assert(InstanceKlass::cast(abk)->defined_by_boot_loader(),
1004             "must be boot class");
1005       check_allowed_klass(InstanceKlass::cast(ObjArrayKlass::cast(orig_k)->bottom_klass()));
1006     }
1007     if (orig_k == Universe::objectArrayKlass()) {
1008       // Initialized early during Universe::genesis. No need to be added
1009       // to the list.
1010       return;
1011     }



1012   } else {
1013     assert(orig_k->is_typeArray_klass(), "must be");
1014     // Primitive type arrays are created early during Universe::genesis.
1015     return;
1016   }
1017 
1018   if (log_is_enabled(Debug, aot, heap)) {
1019     if (!_subgraph_object_klasses->contains(orig_k)) {
1020       ResourceMark rm;
1021       log_debug(aot, heap)("Adding klass %s", orig_k->external_name());
1022     }
1023   }
1024 
1025   _subgraph_object_klasses->append_if_missing(orig_k);
1026   _has_non_early_klasses |= is_non_early_klass(orig_k);
1027 }
1028 
1029 void KlassSubGraphInfo::check_allowed_klass(InstanceKlass* ik) {
1030 #ifndef PRODUCT
1031   if (AOTClassInitializer::has_test_class()) {

1424         log_info(aot, heap)("subgraph %s cannot be used because full module graph is disabled",
1425                             k->external_name());
1426       }
1427       return nullptr;
1428     }
1429 
1430     if (record->has_non_early_klasses() && JvmtiExport::should_post_class_file_load_hook()) {
1431       if (log_is_enabled(Info, aot, heap)) {
1432         ResourceMark rm(THREAD);
1433         log_info(aot, heap)("subgraph %s cannot be used because JVMTI ClassFileLoadHook is enabled",
1434                             k->external_name());
1435       }
1436       return nullptr;
1437     }
1438 
1439     if (log_is_enabled(Info, aot, heap)) {
1440       ResourceMark rm;
1441       log_info(aot, heap)("%s subgraph %s ", do_init ? "init" : "resolve", k->external_name());
1442     }
1443 














1444     resolve_or_init(k, do_init, CHECK_NULL);
1445 
1446     // Load/link/initialize the klasses of the objects in the subgraph.
1447     // nullptr class loader is used.
1448     Array<Klass*>* klasses = record->subgraph_object_klasses();
1449     if (klasses != nullptr) {
1450       for (int i = 0; i < klasses->length(); i++) {
1451         Klass* klass = klasses->at(i);
1452         if (!klass->in_aot_cache()) {
1453           return nullptr;
1454         }
1455         resolve_or_init(klass, do_init, CHECK_NULL);
1456       }
1457     }
1458   }
1459 
1460   return record;
1461 }
1462 
1463 void HeapShared::resolve_or_init(const char* klass_name, bool do_init, TRAPS) {
1464   TempNewSymbol klass_name_sym =  SymbolTable::new_symbol(klass_name);
1465   InstanceKlass* k = SystemDictionaryShared::find_builtin_class(klass_name_sym);
1466   if (k == nullptr) {
1467     return;
1468   }
1469   assert(k->defined_by_boot_loader(), "sanity");
1470   resolve_or_init(k, false, CHECK);
1471   if (do_init) {
1472     resolve_or_init(k, true, CHECK);
1473   }
1474 }
1475 
1476 void HeapShared::resolve_or_init(Klass* k, bool do_init, TRAPS) {
1477   if (!do_init) {
1478     if (k->class_loader_data() == nullptr) {
1479       Klass* resolved_k = SystemDictionary::resolve_or_null(k->name(), CHECK);
1480       assert(resolved_k == k, "classes used by archived heap must not be replaced by JVMTI ClassFileLoadHook");




1481     }
1482   } else {
1483     assert(k->class_loader_data() != nullptr, "must have been resolved by HeapShared::resolve_classes");
1484     if (k->is_instance_klass()) {
1485       InstanceKlass* ik = InstanceKlass::cast(k);
1486       ik->initialize(CHECK);
1487     } else if (k->is_objArray_klass()) {
1488       ObjArrayKlass* oak = ObjArrayKlass::cast(k);
1489       oak->initialize(CHECK);
1490     }
1491   }
1492 }
1493 
1494 void HeapShared::init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record) {
1495   verify_the_heap(k, "before");
1496 
1497   Array<int>* entry_field_records = record->entry_field_records();
1498   if (entry_field_records != nullptr) {
1499     int efr_len = entry_field_records->length();
1500     assert(efr_len % 2 == 0, "sanity");

1922   }
1923 }
1924 #endif
1925 
1926 void HeapShared::check_special_subgraph_classes() {
1927   if (CDSConfig::is_dumping_aot_linked_classes()) {
1928     // We can have aot-initialized classes (such as Enums) that can reference objects
1929     // of arbitrary types. Currently, we trust the JEP 483 implementation to only
1930     // aot-initialize classes that are "safe".
1931     //
1932     // TODO: we need an automatic tool that checks the safety of aot-initialized
1933     // classes (when we extend the set of aot-initialized classes beyond JEP 483)
1934     return;
1935   } else {
1936     // In this case, the special subgraph should contain a few specific types
1937     GrowableArray<Klass*>* klasses = _dump_time_special_subgraph->subgraph_object_klasses();
1938     int num = klasses->length();
1939     for (int i = 0; i < num; i++) {
1940       Klass* subgraph_k = klasses->at(i);
1941       Symbol* name = subgraph_k->name();
1942       if (subgraph_k->is_instance_klass() &&

1943           name != vmSymbols::java_lang_Class() &&
1944           name != vmSymbols::java_lang_String() &&
1945           name != vmSymbols::java_lang_ArithmeticException() &&
1946           name != vmSymbols::java_lang_ArrayIndexOutOfBoundsException() &&
1947           name != vmSymbols::java_lang_ArrayStoreException() &&
1948           name != vmSymbols::java_lang_ClassCastException() &&
1949           name != vmSymbols::java_lang_InternalError() &&
1950           name != vmSymbols::java_lang_NullPointerException() &&
1951           name != vmSymbols::jdk_internal_vm_PreemptedException()) {
1952         ResourceMark rm;
1953         fatal("special subgraph cannot have objects of type %s", subgraph_k->external_name());
1954       }
1955     }
1956   }
1957 }
1958 
// Dump-time bookkeeping: the table of objects already visited during the heap
// walk, the object currently being archived, and per-pass counters.
HeapShared::SeenObjectsTable* HeapShared::_seen_objects_table = nullptr;
HeapShared::PendingOop HeapShared::_object_being_archived;
size_t HeapShared::_num_new_walked_objs;
size_t HeapShared::_num_new_archived_objs;

2214   }
2215 }
2216 
2217 void HeapShared::archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
2218                                           bool is_full_module_graph) {
2219   _num_total_subgraph_recordings = 0;
2220   _num_total_walked_objs = 0;
2221   _num_total_archived_objs = 0;
2222   _num_total_recorded_klasses = 0;
2223   _num_total_verifications = 0;
2224 
2225   // For each class X that has one or more archived fields:
2226   // [1] Dump the subgraph of each archived field
2227   // [2] Create a list of all the class of the objects that can be reached
2228   //     by any of these static fields.
2229   //     At runtime, these classes are initialized before X's archived fields
2230   //     are restored by HeapShared::initialize_from_archived_subgraph().
2231   for (int i = 0; fields[i].valid(); ) {
2232     ArchivableStaticFieldInfo* info = &fields[i];
2233     const char* klass_name = info->klass_name;

2234     start_recording_subgraph(info->klass, klass_name, is_full_module_graph);
2235 
2236     // If you have specified consecutive fields of the same klass in
2237     // fields[], these will be archived in the same
2238     // {start_recording_subgraph ... done_recording_subgraph} pass to
2239     // save time.
2240     for (; fields[i].valid(); i++) {
2241       ArchivableStaticFieldInfo* f = &fields[i];
2242       if (f->klass_name != klass_name) {
2243         break;
2244       }
2245 
2246       archive_reachable_objects_from_static_field(f->klass, f->klass_name,
2247                                                   f->offset, f->field_name);
2248     }
2249     done_recording_subgraph(info->klass, klass_name);
2250   }
2251 
2252   log_info(aot, heap)("Archived subgraph records = %zu",
2253                       _num_total_subgraph_recordings);

  46 #include "classfile/javaClasses.inline.hpp"
  47 #include "classfile/modules.hpp"
  48 #include "classfile/stringTable.hpp"
  49 #include "classfile/symbolTable.hpp"
  50 #include "classfile/systemDictionary.hpp"
  51 #include "classfile/systemDictionaryShared.hpp"
  52 #include "classfile/vmClasses.hpp"
  53 #include "classfile/vmSymbols.hpp"
  54 #include "gc/shared/collectedHeap.hpp"
  55 #include "gc/shared/gcLocker.hpp"
  56 #include "gc/shared/gcVMOperations.hpp"
  57 #include "logging/log.hpp"
  58 #include "logging/logStream.hpp"
  59 #include "memory/iterator.inline.hpp"
  60 #include "memory/resourceArea.hpp"
  61 #include "memory/universe.hpp"
  62 #include "oops/compressedOops.inline.hpp"
  63 #include "oops/fieldStreams.inline.hpp"
  64 #include "oops/objArrayOop.inline.hpp"
  65 #include "oops/oop.inline.hpp"
  66 #include "oops/oopCast.inline.hpp"
  67 #include "oops/oopHandle.inline.hpp"
  68 #include "oops/typeArrayOop.inline.hpp"
  69 #include "prims/jvmtiExport.hpp"
  70 #include "runtime/arguments.hpp"
  71 #include "runtime/fieldDescriptor.inline.hpp"
  72 #include "runtime/globals_extension.hpp"
  73 #include "runtime/init.hpp"
  74 #include "runtime/javaCalls.hpp"
  75 #include "runtime/mutexLocker.hpp"
  76 #include "runtime/safepointVerifiers.hpp"
  77 #include "utilities/bitMap.inline.hpp"
  78 #include "utilities/copy.hpp"
  79 #if INCLUDE_G1GC
  80 #include "gc/g1/g1CollectedHeap.hpp"
  81 #endif
  82 
  83 #if INCLUDE_CDS_JAVA_HEAP
  84 
  85 struct ArchivableStaticFieldInfo {
  86   const char* klass_name;

 580     MutexLocker ml(ScratchObjects_lock, Mutex::_no_safepoint_check_flag);
 581     OopHandle* handle = get(ptr);
 582     if (handle != nullptr) {
 583       handle->release(Universe::vm_global());
 584       remove(ptr);
 585     }
 586   }
 587 };
 588 
 589 void HeapShared::add_scratch_resolved_references(ConstantPool* src, objArrayOop dest) {
 590   if (CDSConfig::is_dumping_preimage_static_archive() && scratch_resolved_references(src) != nullptr) {
 591     // We are in AOT training run. The class has been redefined and we are giving it a new resolved_reference.
 592     // Ignore it, as this class will be excluded from the AOT config.
 593     return;
 594   }
 595   if (SystemDictionaryShared::is_builtin_loader(src->pool_holder()->class_loader_data())) {
 596     _scratch_objects_table->set_oop(src, dest);
 597   }
 598 }
 599 
 600 refArrayOop HeapShared::scratch_resolved_references(ConstantPool* src) {
 601   oop rr = _scratch_objects_table->get_oop(src);
 602   return rr == nullptr ? nullptr : oop_cast<refArrayOop>(rr);
 603 }
 604 
 605  void HeapShared::init_dumping() {
 606    _scratch_objects_table = new (mtClass)MetaspaceObjToOopHandleTable();
 607    _pending_roots = new GrowableArrayCHeap<oop, mtClassShared>(500);
 608 }
 609 
 610 void HeapShared::init_scratch_objects_for_basic_type_mirrors(TRAPS) {
 611   for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
 612     BasicType bt = (BasicType)i;
 613     if (!is_reference_type(bt)) {
 614       oop m = java_lang_Class::create_basic_type_mirror(type2name(bt), bt, CHECK);
 615       _scratch_basic_type_mirrors[i] = OopHandle(Universe::vm_global(), m);
 616     }
 617   }
 618 }
 619 
 620 // Given java_mirror that represents a (primitive or reference) type T,
 621 // return the "scratch" version that represents the same type T. Note
 622 // that java_mirror will be returned if the mirror is already a scratch mirror.

 771     assert(success, "sanity");
 772   }
 773 
 774   if (log_is_enabled(Debug, aot, init)) {
 775     ResourceMark rm;
 776     log_debug(aot, init)("copied %3d field(s) in aot-initialized mirror %s%s%s", nfields, ik->external_name(),
 777                          ik->is_hidden() ? " (hidden)" : "",
 778                          ik->is_enum_subclass() ? " (enum)" : "");
 779   }
 780 }
 781 
// Copy dump-time state from orig_mirror into its scratch (to-be-archived) copy:
// the identity hash (which must be preserved because it may already be baked
// into hashtables in the shared heap), Valhalla value-class mirror fields
// (null_reset value, acmp maps), and — when classes are AOT-linked — the
// mirror's module and protection_domain fields.
void HeapShared::copy_java_mirror(oop orig_mirror, oop scratch_m) {
  // We need to retain the identity_hash, because it may have been used by some hashtables
  // in the shared heap.
  if (!orig_mirror->fast_no_hash_check()) {
    intptr_t src_hash = orig_mirror->identity_hash();
    if (UseCompactObjectHeaders) {
      // With compact headers, the narrow Klass pointer lives in the mark word,
      // so the prototype mark must carry it alongside the copied hash.
      narrowKlass nk = CompressedKlassPointers::encode(orig_mirror->klass());
      scratch_m->set_mark(markWord::prototype().set_narrow_klass(nk).copy_set_hash(src_hash));
    } else {
      // For valhalla, the prototype header is the same as markWord::prototype();
      scratch_m->set_mark(markWord::prototype().copy_set_hash(src_hash));
    }
    assert(scratch_m->mark().is_unlocked(), "sanity");

    // Installing the hash via the mark word must produce the same identity hash
    // that the original mirror reported.
    DEBUG_ONLY(intptr_t archived_hash = scratch_m->identity_hash());
    assert(src_hash == archived_hash, "Different hash codes: original " INTPTR_FORMAT ", archived " INTPTR_FORMAT, src_hash, archived_hash);
  }

  // null for primitive basic-type mirrors, which have no Klass.
  Klass* k = java_lang_Class::as_Klass(orig_mirror);
  if (k != nullptr && k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);

    if (ik->is_inline_klass() && ik->is_initialized()) {
      // Only concrete value classes need the null_reset field
      InlineKlass* ilk = InlineKlass::cast(k);
      if (ilk->supports_nullable_layouts()) {
        scratch_m->obj_field_put(ilk->null_reset_value_offset(), ilk->null_reset_value());
      }
    }

    // Carry over the acmp maps field when the class has one.
    if (ik->has_acmp_maps_offset()) {
      int maps_offset = ik->acmp_maps_offset();
      oop maps = orig_mirror->obj_field(maps_offset);
      scratch_m->obj_field_put(maps_offset, maps);
    }
  }

  if (CDSConfig::is_dumping_aot_linked_classes()) {
    // Presumably AOT-linked classes are restored without the usual runtime
    // fixups, so the archived mirror must already carry these fields —
    // NOTE(review): confirm against the restore path.
    java_lang_Class::set_module(scratch_m, java_lang_Class::module(orig_mirror));
    java_lang_Class::set_protection_domain(scratch_m, java_lang_Class::protection_domain(orig_mirror));
  }
}
 824 
// Return the scratch resolved_references array that will be archived for
// src_ik, or null when the class is not eligible (non-builtin loader, no
// resolved references, or the array is too large to archive).
// NOTE(review): HeapShared::scratch_resolved_references() now returns
// refArrayOop while this function still returns objArrayOop — confirm the
// conversion is intended or align the return types.
static objArrayOop get_archived_resolved_references(InstanceKlass* src_ik) {
  if (SystemDictionaryShared::is_builtin_loader(src_ik->class_loader_data())) {
    // Size check is done on the live array; the scratch copy is what gets archived.
    objArrayOop rr = src_ik->constants()->resolved_references_or_null();
    if (rr != nullptr && !HeapShared::is_too_large_to_archive(rr)) {
      return HeapShared::scratch_resolved_references(src_ik->constants());
    }
  }
  return nullptr;
}
 834 
 835 int HeapShared::archive_exception_instance(oop exception) {
 836   bool success = archive_reachable_objects_from(1, _dump_time_special_subgraph, exception);
 837   assert(success, "sanity");
 838   return append_root(exception);

1014     // the original Klass*
1015     if (orig_k == vmClasses::String_klass() ||
1016         orig_k == vmClasses::Object_klass()) {
1017       // Initialized early during VM initialization. No need to be added
1018       // to the sub-graph object class list.
1019       return;
1020     }
1021     check_allowed_klass(InstanceKlass::cast(orig_k));
1022   } else if (orig_k->is_objArray_klass()) {
1023     Klass* abk = ObjArrayKlass::cast(orig_k)->bottom_klass();
1024     if (abk->is_instance_klass()) {
1025       assert(InstanceKlass::cast(abk)->defined_by_boot_loader(),
1026             "must be boot class");
1027       check_allowed_klass(InstanceKlass::cast(ObjArrayKlass::cast(orig_k)->bottom_klass()));
1028     }
1029     if (orig_k == Universe::objectArrayKlass()) {
1030       // Initialized early during Universe::genesis. No need to be added
1031       // to the list.
1032       return;
1033     }
1034     if (orig_k->is_flatArray_klass()) {
1035       _subgraph_object_klasses->append_if_missing(FlatArrayKlass::cast(orig_k)->element_klass());
1036     }
1037   } else {
1038     assert(orig_k->is_typeArray_klass(), "must be");
1039     // Primitive type arrays are created early during Universe::genesis.
1040     return;
1041   }
1042 
1043   if (log_is_enabled(Debug, aot, heap)) {
1044     if (!_subgraph_object_klasses->contains(orig_k)) {
1045       ResourceMark rm;
1046       log_debug(aot, heap)("Adding klass %s", orig_k->external_name());
1047     }
1048   }
1049 
1050   _subgraph_object_klasses->append_if_missing(orig_k);
1051   _has_non_early_klasses |= is_non_early_klass(orig_k);
1052 }
1053 
1054 void KlassSubGraphInfo::check_allowed_klass(InstanceKlass* ik) {
1055 #ifndef PRODUCT
1056   if (AOTClassInitializer::has_test_class()) {

1449         log_info(aot, heap)("subgraph %s cannot be used because full module graph is disabled",
1450                             k->external_name());
1451       }
1452       return nullptr;
1453     }
1454 
1455     if (record->has_non_early_klasses() && JvmtiExport::should_post_class_file_load_hook()) {
1456       if (log_is_enabled(Info, aot, heap)) {
1457         ResourceMark rm(THREAD);
1458         log_info(aot, heap)("subgraph %s cannot be used because JVMTI ClassFileLoadHook is enabled",
1459                             k->external_name());
1460       }
1461       return nullptr;
1462     }
1463 
1464     if (log_is_enabled(Info, aot, heap)) {
1465       ResourceMark rm;
1466       log_info(aot, heap)("%s subgraph %s ", do_init ? "init" : "resolve", k->external_name());
1467     }
1468 
1469     Array<Klass*>* klasses = record->subgraph_object_klasses();
1470 
1471     if (do_init && klasses != nullptr) {
1472       // All the classes of the oops in this subgraph are in the klasses array.
1473       // Link them first in case any of the oops are used in the <clinit> methods
1474       // invoked in the rest of this function.
1475       for (int i = 0; i < klasses->length(); i++) {
1476         Klass* klass = klasses->at(i);
1477         if (klass->in_aot_cache() && klass->is_instance_klass()) {
1478           InstanceKlass::cast(klass)->link_class(CHECK_NULL);
1479         }
1480       }
1481     }
1482 
1483     resolve_or_init(k, do_init, CHECK_NULL);
1484 
1485     // Load/link/initialize the klasses of the objects in the subgraph.
1486     // nullptr class loader is used.

1487     if (klasses != nullptr) {
1488       for (int i = 0; i < klasses->length(); i++) {
1489         Klass* klass = klasses->at(i);
1490         if (!klass->in_aot_cache()) {
1491           return nullptr;
1492         }
1493         resolve_or_init(klass, do_init, CHECK_NULL);
1494       }
1495     }
1496   }
1497 
1498   return record;
1499 }
1500 
1501 void HeapShared::resolve_or_init(const char* klass_name, bool do_init, TRAPS) {
1502   TempNewSymbol klass_name_sym =  SymbolTable::new_symbol(klass_name);
1503   InstanceKlass* k = SystemDictionaryShared::find_builtin_class(klass_name_sym);
1504   if (k == nullptr) {
1505     return;
1506   }
1507   assert(k->defined_by_boot_loader(), "sanity");
1508   resolve_or_init(k, false, CHECK);
1509   if (do_init) {
1510     resolve_or_init(k, true, CHECK);
1511   }
1512 }
1513 
1514 void HeapShared::resolve_or_init(Klass* k, bool do_init, TRAPS) {
1515   if (!do_init) {
1516     if (k->class_loader_data() == nullptr) {
1517       Klass* resolved_k = SystemDictionary::resolve_or_null(k->name(), CHECK);
1518       if (resolved_k->is_array_klass()) {
1519         assert(resolved_k == k || resolved_k == k->super(), "classes used by archived heap must not be replaced by JVMTI ClassFileLoadHook");
1520       } else {
1521         assert(resolved_k == k, "classes used by archived heap must not be replaced by JVMTI ClassFileLoadHook");
1522       }
1523     }
1524   } else {
1525     assert(k->class_loader_data() != nullptr, "must have been resolved by HeapShared::resolve_classes");
1526     if (k->is_instance_klass()) {
1527       InstanceKlass* ik = InstanceKlass::cast(k);
1528       ik->initialize(CHECK);
1529     } else if (k->is_objArray_klass()) {
1530       ObjArrayKlass* oak = ObjArrayKlass::cast(k);
1531       oak->initialize(CHECK);
1532     }
1533   }
1534 }
1535 
1536 void HeapShared::init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record) {
1537   verify_the_heap(k, "before");
1538 
1539   Array<int>* entry_field_records = record->entry_field_records();
1540   if (entry_field_records != nullptr) {
1541     int efr_len = entry_field_records->length();
1542     assert(efr_len % 2 == 0, "sanity");

1964   }
1965 }
1966 #endif
1967 
1968 void HeapShared::check_special_subgraph_classes() {
1969   if (CDSConfig::is_dumping_aot_linked_classes()) {
1970     // We can have aot-initialized classes (such as Enums) that can reference objects
1971     // of arbitrary types. Currently, we trust the JEP 483 implementation to only
1972     // aot-initialize classes that are "safe".
1973     //
1974     // TODO: we need an automatic tool that checks the safety of aot-initialized
1975     // classes (when we extend the set of aot-initialized classes beyond JEP 483)
1976     return;
1977   } else {
1978     // In this case, the special subgraph should contain a few specific types
1979     GrowableArray<Klass*>* klasses = _dump_time_special_subgraph->subgraph_object_klasses();
1980     int num = klasses->length();
1981     for (int i = 0; i < num; i++) {
1982       Klass* subgraph_k = klasses->at(i);
1983       Symbol* name = subgraph_k->name();
1984 
1985       if (subgraph_k->is_identity_class() &&
1986           name != vmSymbols::java_lang_Class() &&
1987           name != vmSymbols::java_lang_String() &&
1988           name != vmSymbols::java_lang_ArithmeticException() &&
1989           name != vmSymbols::java_lang_ArrayIndexOutOfBoundsException() &&
1990           name != vmSymbols::java_lang_ArrayStoreException() &&
1991           name != vmSymbols::java_lang_ClassCastException() &&
1992           name != vmSymbols::java_lang_InternalError() &&
1993           name != vmSymbols::java_lang_NullPointerException() &&
1994           name != vmSymbols::jdk_internal_vm_PreemptedException()) {
1995         ResourceMark rm;
1996         fatal("special subgraph cannot have objects of type %s", subgraph_k->external_name());
1997       }
1998     }
1999   }
2000 }
2001 
// Dump-time bookkeeping: the table of objects already visited during the heap
// walk, the object currently being archived, and per-pass counters.
HeapShared::SeenObjectsTable* HeapShared::_seen_objects_table = nullptr;
HeapShared::PendingOop HeapShared::_object_being_archived;
size_t HeapShared::_num_new_walked_objs;
size_t HeapShared::_num_new_archived_objs;

2257   }
2258 }
2259 
2260 void HeapShared::archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
2261                                           bool is_full_module_graph) {
2262   _num_total_subgraph_recordings = 0;
2263   _num_total_walked_objs = 0;
2264   _num_total_archived_objs = 0;
2265   _num_total_recorded_klasses = 0;
2266   _num_total_verifications = 0;
2267 
2268   // For each class X that has one or more archived fields:
2269   // [1] Dump the subgraph of each archived field
2270   // [2] Create a list of all the class of the objects that can be reached
2271   //     by any of these static fields.
2272   //     At runtime, these classes are initialized before X's archived fields
2273   //     are restored by HeapShared::initialize_from_archived_subgraph().
2274   for (int i = 0; fields[i].valid(); ) {
2275     ArchivableStaticFieldInfo* info = &fields[i];
2276     const char* klass_name = info->klass_name;
2277 
2278     start_recording_subgraph(info->klass, klass_name, is_full_module_graph);
2279 
2280     // If you have specified consecutive fields of the same klass in
2281     // fields[], these will be archived in the same
2282     // {start_recording_subgraph ... done_recording_subgraph} pass to
2283     // save time.
2284     for (; fields[i].valid(); i++) {
2285       ArchivableStaticFieldInfo* f = &fields[i];
2286       if (f->klass_name != klass_name) {
2287         break;
2288       }
2289 
2290       archive_reachable_objects_from_static_field(f->klass, f->klass_name,
2291                                                   f->offset, f->field_name);
2292     }
2293     done_recording_subgraph(info->klass, klass_name);
2294   }
2295 
2296   log_info(aot, heap)("Archived subgraph records = %zu",
2297                       _num_total_subgraph_recordings);
< prev index next >