/*
 * Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_CDS_HEAPSHARED_HPP
#define SHARE_CDS_HEAPSHARED_HPP

#include "cds/aotMetaspace.hpp"
#include "cds/dumpTimeClassInfo.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "gc/shared/gc_globals.hpp"
#include "memory/allocation.hpp"
#include "memory/allStatic.hpp"
#include "oops/compressedOops.hpp"
#include "oops/oop.hpp"
#include "oops/oopHandle.hpp"
#include "oops/oopsHierarchy.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/hashTable.hpp"

#if INCLUDE_CDS_JAVA_HEAP
class DumpedInternedStrings;
class FileMapInfo;
class KlassSubGraphInfo;
class MetaspaceObjToOopHandleTable;
class ResourceBitMap;

struct ArchivableStaticFieldInfo;
class ArchiveHeapInfo;

#define ARCHIVED_BOOT_LAYER_CLASS "jdk/internal/module/ArchivedBootLayer"
#define ARCHIVED_BOOT_LAYER_FIELD "archivedBootLayer"

// Dump-time sub-graph info for Klass _k. It includes the entry points
// (static fields in _k's mirror) of the archived sub-graphs reachable
// from _k's mirror. It also contains a list of Klasses of the objects
// within the sub-graphs.
class KlassSubGraphInfo: public CHeapObj<mtClass> {
private:
  // The class that contains the static field(s) as the entry point(s)
  // of archived object sub-graph(s).
  Klass* _k;
  // A list of classes that need to be loaded and initialized before the archived
  // object sub-graphs can be accessed at runtime.
  GrowableArray<Klass*>* _subgraph_object_klasses;
  // A list of _k's static fields as the entry points of archived sub-graphs.
  // Each entry field is recorded as a (field_offset, field_value) tuple.
  GrowableArray<int>* _subgraph_entry_fields;

  // Does this KlassSubGraphInfo belong to the archived full module graph?
  bool _is_full_module_graph;

  // Does this KlassSubGraphInfo reference any classes that were loaded while
  // JvmtiExport::is_early_phase() != true? If so, this KlassSubGraphInfo cannot be
  // used at runtime if JVMTI ClassFileLoadHook is enabled.
  bool _has_non_early_klasses;
  static bool is_non_early_klass(Klass* k);
  static void check_allowed_klass(InstanceKlass* ik);
public:
  KlassSubGraphInfo(Klass* k, bool is_full_module_graph) :
    _k(k), _subgraph_object_klasses(nullptr),
    _subgraph_entry_fields(nullptr),
    _is_full_module_graph(is_full_module_graph),
    _has_non_early_klasses(false) {}

  ~KlassSubGraphInfo() {
    if (_subgraph_object_klasses != nullptr) {
      delete _subgraph_object_klasses;
    }
    if (_subgraph_entry_fields != nullptr) {
      delete _subgraph_entry_fields;
    }
  }

  Klass* klass() { return _k; }
  GrowableArray<Klass*>* subgraph_object_klasses() {
    return _subgraph_object_klasses;
  }
  GrowableArray<int>* subgraph_entry_fields() {
    return _subgraph_entry_fields;
  }
  void add_subgraph_entry_field(int static_field_offset, oop v);
  void add_subgraph_object_klass(Klass* orig_k);
  int num_subgraph_object_klasses() {
    return _subgraph_object_klasses == nullptr ? 0 :
           _subgraph_object_klasses->length();
  }
  bool is_full_module_graph() const { return _is_full_module_graph; }
  bool has_non_early_klasses() const { return _has_non_early_klasses; }
};

// An archived record of object sub-graphs reachable from static
// fields within _k's mirror. The record is reloaded from the archive
// at runtime.
class ArchivedKlassSubGraphInfoRecord {
private:
  Klass* _k;
  bool _is_full_module_graph;
  bool _has_non_early_klasses;

  // Contains a (field_offset, value) pair for each subgraph entry field.
  Array<int>* _entry_field_records;

  // Klasses of objects in archived sub-graphs referenced from the entry points
  // (static fields) in the containing class.
  Array<Klass*>* _subgraph_object_klasses;
public:
  ArchivedKlassSubGraphInfoRecord() :
    _k(nullptr), _entry_field_records(nullptr), _subgraph_object_klasses(nullptr) {}
  void init(KlassSubGraphInfo* info);
  Klass* klass() const { return _k; }
  Array<int>* entry_field_records() const { return _entry_field_records; }
  Array<Klass*>* subgraph_object_klasses() const { return _subgraph_object_klasses; }
  bool is_full_module_graph() const { return _is_full_module_graph; }
  bool has_non_early_klasses() const { return _has_non_early_klasses; }
};
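
// Illustrative sketch only (not part of this header's interface; the actual
// consumer lives in the CDS runtime code, not here). It shows how the
// (field_offset, value) pairs in _entry_field_records are expected to be
// walked at run time, assuming the archived value is stored as an index into
// the heap roots (see HeapShared::get_root() below). Local names are
// hypothetical.
//
//   Array<int>* efr = record->entry_field_records();
//   for (int i = 0; efr != nullptr && i < efr->length(); i += 2) {
//     int field_offset = efr->at(i);
//     int root_index   = efr->at(i + 1);
//     oop v = HeapShared::get_root(root_index, /*clear=*/true);
//     record->klass()->java_mirror()->obj_field_put(field_offset, v);
//   }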
#endif // INCLUDE_CDS_JAVA_HEAP

struct LoadedArchiveHeapRegion;

class HeapShared: AllStatic {
  friend class VerifySharedOopClosure;

public:
  static bool is_subgraph_root_class(InstanceKlass* ik);

  // Scratch objects for archiving Klass::java_mirror()
  static oop scratch_java_mirror(BasicType t) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static oop scratch_java_mirror(Klass* k) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static oop scratch_java_mirror(oop java_mirror) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static bool is_archived_boot_layer_available(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN_(false);

private:
#if INCLUDE_CDS_JAVA_HEAP
  static DumpedInternedStrings* _dumped_interned_strings;

  // Statistics
  constexpr static int ALLOC_STAT_SLOTS = 16;
  static size_t _alloc_count[ALLOC_STAT_SLOTS];
  static size_t _alloc_size[ALLOC_STAT_SLOTS];
  static size_t _total_obj_count;
  static size_t _total_obj_size; // in HeapWords

  static void count_allocation(size_t size);
  static void print_stats();
public:
  static void debug_trace();
  static unsigned oop_hash(oop const& p);
  static unsigned oop_handle_hash(OopHandle const& oh);
  static unsigned oop_handle_hash_raw(OopHandle const& oh);
  static bool oop_handle_equals(const OopHandle& a, const OopHandle& b);
  static unsigned string_oop_hash(oop const& string) {
    return java_lang_String::hash_code(string);
  }

  class CopyKlassSubGraphInfoToArchive;

  class CachedOopInfo {
    // Used by CDSHeapVerifier.
    OopHandle _orig_referrer;

    // The location of this object inside ArchiveHeapWriter::_buffer
    size_t _buffer_offset;

    // One or more fields in this object point to non-null oops.
    bool _has_oop_pointers;

    // One or more fields in this object point to MetaspaceObjs.
    bool _has_native_pointers;
  public:
    CachedOopInfo(OopHandle orig_referrer, bool has_oop_pointers)
      : _orig_referrer(orig_referrer),
        _buffer_offset(0),
        _has_oop_pointers(has_oop_pointers),
        _has_native_pointers(false) {}
    oop orig_referrer() const;
    void set_buffer_offset(size_t offset) { _buffer_offset = offset; }
    size_t buffer_offset() const { return _buffer_offset; }
    bool has_oop_pointers() const { return _has_oop_pointers; }
    bool has_native_pointers() const { return _has_native_pointers; }
    void set_has_native_pointers() { _has_native_pointers = true; }
  };

private:
  static const int INITIAL_TABLE_SIZE = 15889; // prime number
  static const int MAX_TABLE_SIZE = 1000000;
  typedef ResizeableHashTable<OopHandle, CachedOopInfo,
                              AnyObj::C_HEAP,
                              mtClassShared,
                              HeapShared::oop_handle_hash_raw,
                              HeapShared::oop_handle_equals> ArchivedObjectCache;
  static ArchivedObjectCache* _archived_object_cache;

  class DumpTimeKlassSubGraphInfoTable
    : public HashTable<Klass*, KlassSubGraphInfo,
                       137, // prime number
                       AnyObj::C_HEAP,
                       mtClassShared,
                       DumpTimeSharedClassTable_hash> {
  public:
    int _count;
  };

public: // solaris compiler wants this for RunTimeKlassSubGraphInfoTable
  inline static bool record_equals_compact_hashtable_entry(
      const ArchivedKlassSubGraphInfoRecord* value, const Klass* key, int len_unused) {
    return (value->klass() == key);
  }

private:
  typedef OffsetCompactHashtable<
    const Klass*,
    const ArchivedKlassSubGraphInfoRecord*,
    record_equals_compact_hashtable_entry
    > RunTimeKlassSubGraphInfoTable;

  static DumpTimeKlassSubGraphInfoTable* _dump_time_subgraph_info_table;
  static RunTimeKlassSubGraphInfoTable _run_time_subgraph_info_table;

  static CachedOopInfo make_cached_oop_info(oop obj, oop referrer);
  static ArchivedKlassSubGraphInfoRecord* archive_subgraph_info(KlassSubGraphInfo* info);
  static void archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
                                       bool is_full_module_graph);

  // Archive object sub-graph starting from the given static field
  // in Klass k's mirror.
  static void archive_reachable_objects_from_static_field(
      InstanceKlass* k, const char* klass_name,
      int field_offset, const char* field_name);

  static void verify_subgraph_from_static_field(
      InstanceKlass* k, int field_offset) PRODUCT_RETURN;
  static void verify_reachable_objects_from(oop obj) PRODUCT_RETURN;
  static void verify_subgraph_from(oop orig_obj) PRODUCT_RETURN;
  static void check_special_subgraph_classes();

  static KlassSubGraphInfo* init_subgraph_info(Klass* k, bool is_full_module_graph);
  static KlassSubGraphInfo* get_subgraph_info(Klass* k);

  static void init_subgraph_entry_fields(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_subgraph_entry_fields(ArchivableStaticFieldInfo fields[], TRAPS);

  // UseCompressedOops only: Used by decode_from_archive
  static address _narrow_oop_base;
  static int _narrow_oop_shift;

  // !UseCompressedOops only: used to relocate pointers to the archived objects
  static ptrdiff_t _runtime_delta;

  typedef ResizeableHashTable<oop, bool,
                              AnyObj::C_HEAP,
                              mtClassShared,
                              HeapShared::oop_hash> SeenObjectsTable;

  static SeenObjectsTable* _seen_objects_table;

  // The "special subgraph" contains all the archived objects that are reachable
  // from the following roots:
  //    - interned strings
  //    - Klass::java_mirror() -- including aot-initialized mirrors such as those of Enum klasses.
  //    - ConstantPool::resolved_references()
  //    - Universe::<xxx>_exception_instance()
  static KlassSubGraphInfo* _dump_time_special_subgraph;              // for collecting info during dump time
  static ArchivedKlassSubGraphInfoRecord* _run_time_special_subgraph; // for initializing classes during run time

  static GrowableArrayCHeap<oop, mtClassShared>* _pending_roots;
  static GrowableArrayCHeap<OopHandle, mtClassShared>* _root_segments;
  static int _root_segment_max_size_elems;
  static OopHandle _scratch_basic_type_mirrors[T_VOID+1];
  static MetaspaceObjToOopHandleTable* _scratch_objects_table;

  static void init_seen_objects_table() {
    assert(_seen_objects_table == nullptr, "must be");
    _seen_objects_table = new (mtClass)SeenObjectsTable(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
  }
  static void delete_seen_objects_table() {
    assert(_seen_objects_table != nullptr, "must be");
    delete _seen_objects_table;
    _seen_objects_table = nullptr;
  }
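
  // Illustrative sketch (an assumed shape, not the actual implementation in the
  // .cpp file): each recorded sub-graph walk is expected to be bracketed by the
  // seen-objects table above, roughly as follows. The local names are hypothetical.
  //
  //   start_recording_subgraph(ik, klass_name, is_full_module_graph);
  //   init_seen_objects_table();
  //   bool ok = archive_reachable_objects_from(1, subgraph_info, field_value);
  //   delete_seen_objects_table();
  //   done_recording_subgraph(ik, klass_name);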
  // Statistics (for one round of start_recording_subgraph ... done_recording_subgraph)
  static int _num_new_walked_objs;
  static int _num_new_archived_objs;
  static int _num_old_recorded_klasses;

  // Statistics (for all archived subgraphs)
  static int _num_total_subgraph_recordings;
  static int _num_total_walked_objs;
  static int _num_total_archived_objs;
  static int _num_total_recorded_klasses;
  static int _num_total_verifications;

  static void start_recording_subgraph(InstanceKlass* k, const char* klass_name,
                                       bool is_full_module_graph);
  static void done_recording_subgraph(InstanceKlass* k, const char* klass_name);

  static bool has_been_seen_during_subgraph_recording(oop obj);
  static void set_has_been_seen_during_subgraph_recording(oop obj);
  static bool archive_object(oop obj, oop referrer, KlassSubGraphInfo* subgraph_info);

  static void resolve_classes_for_subgraphs(JavaThread* current, ArchivableStaticFieldInfo fields[]);
  static void resolve_classes_for_subgraph_of(JavaThread* current, Klass* k);
  static void clear_archived_roots_of(Klass* k);
  static const ArchivedKlassSubGraphInfoRecord*
              resolve_or_init_classes_for_subgraph_of(Klass* k, bool do_init, TRAPS);
  static void resolve_or_init(const char* klass_name, bool do_init, TRAPS);
  static void resolve_or_init(Klass* k, bool do_init, TRAPS);
  static void init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record);

  static int init_loaded_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
                                 MemRegion& archive_space);
  static void sort_loaded_regions(LoadedArchiveHeapRegion* loaded_regions, int num_loaded_regions,
                                  uintptr_t buffer);
  static bool load_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
                           int num_loaded_regions, uintptr_t buffer);
  static void init_loaded_heap_relocation(LoadedArchiveHeapRegion* reloc_info,
                                          int num_loaded_regions);
  static void fill_failed_loaded_region();
  static void mark_native_pointers(oop orig_obj);
  static bool has_been_archived(oop orig_obj);
  static void prepare_resolved_references();
  static void archive_strings();
  static void archive_subgraphs();
  static void copy_java_mirror(oop orig_mirror, oop scratch_m);

  // PendingOop and PendingOopStack are used for recursively discovering all cacheable
  // heap objects. The recursion is done using PendingOopStack so we won't overflow the
  // C stack with deep reference chains.
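  //
  // Illustrative sketch (a simplification, not the actual walking code): the
  // discovery loop replaces C-stack recursion with an explicit stack of
  // PendingOop entries, roughly:
  //
  //   PendingOopStack stack;
  //   stack.push(PendingOop(orig_obj, /*referrer=*/nullptr, /*level=*/1));
  //   while (stack.length() > 0) {
  //     PendingOop po = stack.pop();
  //     // walk_one_object() archives po.obj() and pushes the objects referenced
  //     // by its not-yet-seen oop fields back onto the stack.
  //     walk_one_object(&stack, po.level(), subgraph_info, po.obj(), po.referrer());
  //   }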
  class PendingOop {
    oop _obj;
    oop _referrer;
    int _level;

  public:
    PendingOop() : _obj(nullptr), _referrer(nullptr), _level(-1) {}
    PendingOop(oop obj, oop referrer, int level) : _obj(obj), _referrer(referrer), _level(level) {}

    oop obj() const { return _obj; }
    oop referrer() const { return _referrer; }
    int level() const { return _level; }
  };

  class OopFieldPusher;
  using PendingOopStack = GrowableArrayCHeap<PendingOop, mtClassShared>;

  static PendingOop _object_being_archived;
  static bool walk_one_object(PendingOopStack* stack, int level, KlassSubGraphInfo* subgraph_info,
                              oop orig_obj, oop referrer);

public:
  static void reset_archived_object_states(TRAPS);
  static void create_archived_object_cache() {
    _archived_object_cache =
      new (mtClass)ArchivedObjectCache(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
  }
  static void destroy_archived_object_cache() {
    delete _archived_object_cache;
    _archived_object_cache = nullptr;
  }
  static ArchivedObjectCache* archived_object_cache() {
    return _archived_object_cache;
  }

  static CachedOopInfo* get_cached_oop_info(oop orig_obj) {
    OopHandle oh(&orig_obj);
    return _archived_object_cache->get(oh);
  }

  static int archive_exception_instance(oop exception);

  static bool archive_reachable_objects_from(int level,
                                             KlassSubGraphInfo* subgraph_info,
                                             oop orig_obj);

  static void add_to_dumped_interned_strings(oop string);
  static bool is_dumped_interned_string(oop o);

  // Scratch objects for archiving Klass::java_mirror()
  static void set_scratch_java_mirror(Klass* k, oop mirror);
  static void remove_scratch_objects(Klass* k);
  static void get_pointer_info(oop src_obj, bool& has_oop_pointers, bool& has_native_pointers);
  static void set_has_native_pointers(oop src_obj);

  // We use the HeapShared::roots() array to make sure that objects stored in the
  // archived heap region are not prematurely collected. These roots include:
  //
  //    - mirrors of classes that have not yet been loaded.
  //    - ConstantPool::resolved_references() of classes that have not yet been loaded.
  //    - ArchivedKlassSubGraphInfoRecords that have not been initialized
  //    - java.lang.Module objects that have not yet been added to the module graph
  //
  // When a mirror M becomes referenced by a newly loaded class K, M will be removed
  // from HeapShared::roots() via clear_root(), and K will be responsible for
  // keeping M alive.
  //
  // Other types of roots are also cleared similarly when they become referenced.
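  //
  // Illustrative sketch (the real call sites are spread across the CDS/AOT code,
  // not in this header): a root is appended once at dump time and consumed at
  // run time, roughly as follows. The "mirror" local is hypothetical.
  //
  //   // Dump time:
  //   int index = append_root(mirror);  // keeps mirror alive in the archived heap
  //
  //   // Run time, once a loaded class K starts referencing the mirror:
  //   oop m = get_root(index);          // read the root
  //   clear_root(index);                // from now on, K keeps m alive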
  // Dump-time only. Returns the index of the root, which can be used at run time to read
  // the root using get_root(index, ...).
  static int append_root(oop obj);
  static GrowableArrayCHeap<oop, mtClassShared>* pending_roots() { return _pending_roots; }

  // Dump-time and run-time
  static objArrayOop root_segment(int segment_idx);
  static oop get_root(int index, bool clear=false);

  // Run-time only
  static void clear_root(int index);

  static void get_segment_indexes(int index, int& segment_index, int& internal_index);

  static void setup_test_class(const char* test_class_name) PRODUCT_RETURN;
#endif // INCLUDE_CDS_JAVA_HEAP

public:
  static void write_heap(ArchiveHeapInfo* heap_info) NOT_CDS_JAVA_HEAP_RETURN;
  static objArrayOop scratch_resolved_references(ConstantPool* src);
  static void add_scratch_resolved_references(ConstantPool* src, objArrayOop dest) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_dumping() NOT_CDS_JAVA_HEAP_RETURN;
  static void init_scratch_objects_for_basic_type_mirrors(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_box_classes(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static bool is_heap_region(int idx) {
    CDS_JAVA_HEAP_ONLY(return (idx == AOTMetaspace::hp);)
    NOT_CDS_JAVA_HEAP_RETURN_(false);
  }
  static void delete_tables_with_raw_oops() NOT_CDS_JAVA_HEAP_RETURN;

  static void resolve_classes(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN;
  static void initialize_from_archived_subgraph(JavaThread* current, Klass* k) NOT_CDS_JAVA_HEAP_RETURN;

  static void init_for_dumping(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void write_subgraph_info_table() NOT_CDS_JAVA_HEAP_RETURN;
  static void add_root_segment(objArrayOop segment_oop) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_root_segment_sizes(int max_size_elems) NOT_CDS_JAVA_HEAP_RETURN;
  static void serialize_tables(SerializeClosure* soc) NOT_CDS_JAVA_HEAP_RETURN;

#ifndef PRODUCT
  static bool is_a_test_class_in_unnamed_module(Klass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static void initialize_test_class_from_archive(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
#endif

  static void initialize_java_lang_invoke(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_classes_for_special_subgraph(Handle loader, TRAPS) NOT_CDS_JAVA_HEAP_RETURN;

  static bool is_lambda_form_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static bool is_lambda_proxy_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static bool is_string_concat_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static bool is_archivable_hidden_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);

  // Used by AOTArtifactFinder
  static void start_scanning_for_oops();
  static void end_scanning_for_oops();
  static void scan_java_class(Klass* k);
  static void scan_java_mirror(oop orig_mirror);
  static void copy_and_rescan_aot_inited_mirror(InstanceKlass* ik);
};

#if INCLUDE_CDS_JAVA_HEAP
class DumpedInternedStrings :
  public ResizeableHashTable<oop, bool,
                             AnyObj::C_HEAP,
                             mtClassShared,
                             HeapShared::string_oop_hash>
{
public:
  DumpedInternedStrings(unsigned size, unsigned max_size) :
    ResizeableHashTable<oop, bool,
                        AnyObj::C_HEAP,
                        mtClassShared,
                        HeapShared::string_oop_hash>(size, max_size) {}
};
#endif

#endif // SHARE_CDS_HEAPSHARED_HPP