790 assert(success, "sanity");
791 }
792
793 if (log_is_enabled(Debug, aot, init)) {
794 ResourceMark rm;
795 log_debug(aot, init)("copied %3d field(s) in aot-initialized mirror %s%s%s", nfields, ik->external_name(),
796 ik->is_hidden() ? " (hidden)" : "",
797 ik->is_enum_subclass() ? " (enum)" : "");
798 }
799 }
800
// Copy state from the original java.lang.Class mirror into its scratch copy
// (the object that will be written into the archive): the identity hash kept
// in the mark word and, when dumping AOT-linked classes, the module and
// protection-domain fields.
void HeapShared::copy_java_mirror(oop orig_mirror, oop scratch_m) {
  // We need to retain the identity_hash, because it may have been used by some hashtables
  // in the shared heap.
  if (!orig_mirror->fast_no_hash_check()) {
    intptr_t src_hash = orig_mirror->identity_hash();
    if (UseCompactObjectHeaders) {
      // With compact headers the narrow klass lives in the mark word, so it
      // must be installed alongside the copied hash.
      narrowKlass nk = CompressedKlassPointers::encode(orig_mirror->klass());
      scratch_m->set_mark(markWord::prototype().set_narrow_klass(nk).copy_set_hash(src_hash));
    } else {
      scratch_m->set_mark(markWord::prototype().copy_set_hash(src_hash));
    }
    assert(scratch_m->mark().is_unlocked(), "sanity");

    // Debug-only round-trip check: reading the hash back from the scratch
    // mirror must yield exactly what was copied in.
    DEBUG_ONLY(intptr_t archived_hash = scratch_m->identity_hash());
    assert(src_hash == archived_hash, "Different hash codes: original " INTPTR_FORMAT ", archived " INTPTR_FORMAT, src_hash, archived_hash);
  }

  if (CDSConfig::is_dumping_aot_linked_classes()) {
    // AOT-linked classes carry their module and protection domain over into
    // the archived mirror.
    java_lang_Class::set_module(scratch_m, java_lang_Class::module(orig_mirror));
    java_lang_Class::set_protection_domain(scratch_m, java_lang_Class::protection_domain(orig_mirror));
  }
}
823
824 static objArrayOop get_archived_resolved_references(InstanceKlass* src_ik) {
825 if (SystemDictionaryShared::is_builtin_loader(src_ik->class_loader_data())) {
826 objArrayOop rr = src_ik->constants()->resolved_references_or_null();
827 if (rr != nullptr && !HeapShared::is_too_large_to_archive(rr)) {
828 return HeapShared::scratch_resolved_references(src_ik->constants());
829 }
1497 return record;
1498 }
1499
1500 void HeapShared::resolve_or_init(const char* klass_name, bool do_init, TRAPS) {
1501 TempNewSymbol klass_name_sym = SymbolTable::new_symbol(klass_name);
1502 InstanceKlass* k = SystemDictionaryShared::find_builtin_class(klass_name_sym);
1503 if (k == nullptr) {
1504 return;
1505 }
1506 assert(k->defined_by_boot_loader(), "sanity");
1507 resolve_or_init(k, false, CHECK);
1508 if (do_init) {
1509 resolve_or_init(k, true, CHECK);
1510 }
1511 }
1512
1513 void HeapShared::resolve_or_init(Klass* k, bool do_init, TRAPS) {
1514 if (!do_init) {
1515 if (k->class_loader_data() == nullptr) {
1516 Klass* resolved_k = SystemDictionary::resolve_or_null(k->name(), CHECK);
1517 assert(resolved_k == k, "classes used by archived heap must not be replaced by JVMTI ClassFileLoadHook");
1518 }
1519 } else {
1520 assert(k->class_loader_data() != nullptr, "must have been resolved by HeapShared::resolve_classes");
1521 if (k->is_instance_klass()) {
1522 InstanceKlass* ik = InstanceKlass::cast(k);
1523 ik->initialize(CHECK);
1524 } else if (k->is_objArray_klass()) {
1525 ObjArrayKlass* oak = ObjArrayKlass::cast(k);
1526 oak->initialize(CHECK);
1527 }
1528 }
1529 }
1530
1531 void HeapShared::init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record) {
1532 verify_the_heap(k, "before");
1533
1534 Array<int>* entry_field_records = record->entry_field_records();
1535 if (entry_field_records != nullptr) {
1536 int efr_len = entry_field_records->length();
1537 assert(efr_len % 2 == 0, "sanity");
2251 }
2252 }
2253
2254 void HeapShared::archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
2255 bool is_full_module_graph) {
2256 _num_total_subgraph_recordings = 0;
2257 _num_total_walked_objs = 0;
2258 _num_total_archived_objs = 0;
2259 _num_total_recorded_klasses = 0;
2260 _num_total_verifications = 0;
2261
2262 // For each class X that has one or more archived fields:
2263 // [1] Dump the subgraph of each archived field
2264 // [2] Create a list of all the class of the objects that can be reached
2265 // by any of these static fields.
2266 // At runtime, these classes are initialized before X's archived fields
2267 // are restored by HeapShared::initialize_from_archived_subgraph().
2268 for (int i = 0; fields[i].valid(); ) {
2269 ArchivableStaticFieldInfo* info = &fields[i];
2270 const char* klass_name = info->klass_name;
2271 start_recording_subgraph(info->klass, klass_name, is_full_module_graph);
2272
2273 // If you have specified consecutive fields of the same klass in
2274 // fields[], these will be archived in the same
2275 // {start_recording_subgraph ... done_recording_subgraph} pass to
2276 // save time.
2277 for (; fields[i].valid(); i++) {
2278 ArchivableStaticFieldInfo* f = &fields[i];
2279 if (f->klass_name != klass_name) {
2280 break;
2281 }
2282
2283 archive_reachable_objects_from_static_field(f->klass, f->klass_name,
2284 f->offset, f->field_name);
2285 }
2286 done_recording_subgraph(info->klass, klass_name);
2287 }
2288
2289 log_info(aot, heap)("Archived subgraph records = %zu",
2290 _num_total_subgraph_recordings);
|
790 assert(success, "sanity");
791 }
792
793 if (log_is_enabled(Debug, aot, init)) {
794 ResourceMark rm;
795 log_debug(aot, init)("copied %3d field(s) in aot-initialized mirror %s%s%s", nfields, ik->external_name(),
796 ik->is_hidden() ? " (hidden)" : "",
797 ik->is_enum_subclass() ? " (enum)" : "");
798 }
799 }
800
801 void HeapShared::copy_java_mirror(oop orig_mirror, oop scratch_m) {
802 // We need to retain the identity_hash, because it may have been used by some hashtables
803 // in the shared heap.
804 if (!orig_mirror->fast_no_hash_check()) {
805 intptr_t src_hash = orig_mirror->identity_hash();
806 if (UseCompactObjectHeaders) {
807 narrowKlass nk = CompressedKlassPointers::encode(orig_mirror->klass());
808 scratch_m->set_mark(markWord::prototype().set_narrow_klass(nk).copy_set_hash(src_hash));
809 } else {
810 // For valhalla, the prototype header is the same as markWord::prototype();
811 scratch_m->set_mark(markWord::prototype().copy_set_hash(src_hash));
812 }
813 assert(scratch_m->mark().is_unlocked(), "sanity");
814
815 DEBUG_ONLY(intptr_t archived_hash = scratch_m->identity_hash());
816 assert(src_hash == archived_hash, "Different hash codes: original " INTPTR_FORMAT ", archived " INTPTR_FORMAT, src_hash, archived_hash);
817 }
818
819 if (CDSConfig::is_dumping_aot_linked_classes()) {
820 java_lang_Class::set_module(scratch_m, java_lang_Class::module(orig_mirror));
821 java_lang_Class::set_protection_domain(scratch_m, java_lang_Class::protection_domain(orig_mirror));
822 }
823 }
824
825 static objArrayOop get_archived_resolved_references(InstanceKlass* src_ik) {
826 if (SystemDictionaryShared::is_builtin_loader(src_ik->class_loader_data())) {
827 objArrayOop rr = src_ik->constants()->resolved_references_or_null();
828 if (rr != nullptr && !HeapShared::is_too_large_to_archive(rr)) {
829 return HeapShared::scratch_resolved_references(src_ik->constants());
830 }
1498 return record;
1499 }
1500
// Resolve — and, when do_init is true, also initialize — the builtin class
// with the given name. Returns silently if the class is not present in the
// shared dictionary.
void HeapShared::resolve_or_init(const char* klass_name, bool do_init, TRAPS) {
  TempNewSymbol klass_name_sym = SymbolTable::new_symbol(klass_name);
  InstanceKlass* k = SystemDictionaryShared::find_builtin_class(klass_name_sym);
  if (k == nullptr) {
    // Not an archived builtin class; nothing to do.
    return;
  }
  assert(k->defined_by_boot_loader(), "sanity");
  // Resolve first; initialization (if requested) is a separate second pass.
  resolve_or_init(k, false, CHECK);
  if (do_init) {
    resolve_or_init(k, true, CHECK);
  }
}
1513
// Resolve (do_init == false) or initialize (do_init == true) a class used by
// the archived heap. Resolution is attempted only when the klass has no
// class_loader_data yet; initialization requires prior resolution.
void HeapShared::resolve_or_init(Klass* k, bool do_init, TRAPS) {
  if (!do_init) {
    if (k->class_loader_data() == nullptr) {
      // Not yet claimed by a loader — resolve it by name now.
      Klass* resolved_k = SystemDictionary::resolve_or_null(k->name(), CHECK);
      if (resolved_k->is_array_klass()) {
        // NOTE(review): for array classes, name resolution apparently may
        // return the super of the archived klass — confirm why k->super()
        // is an acceptable result here.
        assert(resolved_k == k || resolved_k == k->super(), "classes used by archived heap must not be replaced by JVMTI ClassFileLoadHook");
      } else {
        assert(resolved_k == k, "classes used by archived heap must not be replaced by JVMTI ClassFileLoadHook");
      }
    }
  } else {
    assert(k->class_loader_data() != nullptr, "must have been resolved by HeapShared::resolve_classes");
    if (k->is_instance_klass()) {
      InstanceKlass* ik = InstanceKlass::cast(k);
      ik->initialize(CHECK);
    } else if (k->is_objArray_klass()) {
      ObjArrayKlass* oak = ObjArrayKlass::cast(k);
      oak->initialize(CHECK);
    }
  }
}
1535
1536 void HeapShared::init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record) {
1537 verify_the_heap(k, "before");
1538
1539 Array<int>* entry_field_records = record->entry_field_records();
1540 if (entry_field_records != nullptr) {
1541 int efr_len = entry_field_records->length();
1542 assert(efr_len % 2 == 0, "sanity");
2256 }
2257 }
2258
2259 void HeapShared::archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
2260 bool is_full_module_graph) {
2261 _num_total_subgraph_recordings = 0;
2262 _num_total_walked_objs = 0;
2263 _num_total_archived_objs = 0;
2264 _num_total_recorded_klasses = 0;
2265 _num_total_verifications = 0;
2266
2267 // For each class X that has one or more archived fields:
2268 // [1] Dump the subgraph of each archived field
2269 // [2] Create a list of all the class of the objects that can be reached
2270 // by any of these static fields.
2271 // At runtime, these classes are initialized before X's archived fields
2272 // are restored by HeapShared::initialize_from_archived_subgraph().
2273 for (int i = 0; fields[i].valid(); ) {
2274 ArchivableStaticFieldInfo* info = &fields[i];
2275 const char* klass_name = info->klass_name;
2276
2277 start_recording_subgraph(info->klass, klass_name, is_full_module_graph);
2278
2279 // If you have specified consecutive fields of the same klass in
2280 // fields[], these will be archived in the same
2281 // {start_recording_subgraph ... done_recording_subgraph} pass to
2282 // save time.
2283 for (; fields[i].valid(); i++) {
2284 ArchivableStaticFieldInfo* f = &fields[i];
2285 if (f->klass_name != klass_name) {
2286 break;
2287 }
2288
2289 archive_reachable_objects_from_static_field(f->klass, f->klass_name,
2290 f->offset, f->field_name);
2291 }
2292 done_recording_subgraph(info->klass, klass_name);
2293 }
2294
2295 log_info(aot, heap)("Archived subgraph records = %zu",
2296 _num_total_subgraph_recordings);
|