1 /*
  2  * Copyright (c) 2018, 2024, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_CDS_HEAPSHARED_HPP
 26 #define SHARE_CDS_HEAPSHARED_HPP
 27 
 28 #include "cds/dumpTimeClassInfo.hpp"
 29 #include "cds/metaspaceShared.hpp"
 30 #include "classfile/compactHashtable.hpp"
 31 #include "classfile/javaClasses.hpp"
 32 #include "gc/shared/gc_globals.hpp"
 33 #include "memory/allocation.hpp"
 34 #include "memory/allStatic.hpp"
 35 #include "oops/compressedOops.hpp"
 36 #include "oops/oop.hpp"
 37 #include "oops/oopHandle.hpp"
 38 #include "oops/oopsHierarchy.hpp"
 39 #include "utilities/growableArray.hpp"
 40 #include "utilities/resourceHash.hpp"
 41 
 42 #if INCLUDE_CDS_JAVA_HEAP
 43 class DumpedInternedStrings;
 44 class FileMapInfo;
 45 class KlassSubGraphInfo;
 46 class MetaspaceObjToOopHandleTable;
 47 class ResourceBitMap;
 48 
 49 struct ArchivableStaticFieldInfo;
 50 class ArchiveHeapInfo;
 51 
 52 #define ARCHIVED_BOOT_LAYER_CLASS "jdk/internal/module/ArchivedBootLayer"
 53 #define ARCHIVED_BOOT_LAYER_FIELD "archivedBootLayer"
 54 
 55 // A dump time sub-graph info for Klass _k. It includes the entry points
 56 // (static fields in _k's mirror) of the archived sub-graphs reachable
 57 // from _k's mirror. It also contains a list of Klasses of the objects
 58 // within the sub-graphs.
 59 class KlassSubGraphInfo: public CHeapObj<mtClass> {
 60  private:
 61   // The class that contains the static field(s) as the entry point(s)
 62   // of archived object sub-graph(s).
 63   Klass* _k;
 64   // A list of classes need to be loaded and initialized before the archived
 65   // object sub-graphs can be accessed at runtime.
 66   GrowableArray<Klass*>* _subgraph_object_klasses;
 67   // A list of _k's static fields as the entry points of archived sub-graphs.
 68   // For each entry field, it is a tuple of field_offset, field_value
 69   GrowableArray<int>* _subgraph_entry_fields;
 70 
 71   // Does this KlassSubGraphInfo belong to the archived full module graph
 72   bool _is_full_module_graph;
 73 
 74   // Does this KlassSubGraphInfo references any classes that were loaded while
 75   // JvmtiExport::is_early_phase()!=true. If so, this KlassSubGraphInfo cannot be
 76   // used at runtime if JVMTI ClassFileLoadHook is enabled.
 77   bool _has_non_early_klasses;
 78   static bool is_non_early_klass(Klass* k);
 79   static void check_allowed_klass(InstanceKlass* ik);
 80  public:
 81   KlassSubGraphInfo(Klass* k, bool is_full_module_graph) :
 82     _k(k),  _subgraph_object_klasses(nullptr),
 83     _subgraph_entry_fields(nullptr),
 84     _is_full_module_graph(is_full_module_graph),
 85     _has_non_early_klasses(false) {}
 86 
 87   ~KlassSubGraphInfo() {
 88     if (_subgraph_object_klasses != nullptr) {
 89       delete _subgraph_object_klasses;
 90     }
 91     if (_subgraph_entry_fields != nullptr) {
 92       delete _subgraph_entry_fields;
 93     }
 94   };
 95 
 96   Klass* klass()            { return _k; }
 97   GrowableArray<Klass*>* subgraph_object_klasses() {
 98     return _subgraph_object_klasses;
 99   }
100   GrowableArray<int>* subgraph_entry_fields() {
101     return _subgraph_entry_fields;
102   }
103   void add_subgraph_entry_field(int static_field_offset, oop v);
104   void add_subgraph_object_klass(Klass *orig_k);
105   int num_subgraph_object_klasses() {
106     return _subgraph_object_klasses == nullptr ? 0 :
107            _subgraph_object_klasses->length();
108   }
109   bool is_full_module_graph() const { return _is_full_module_graph; }
110   bool has_non_early_klasses() const { return _has_non_early_klasses; }
111 };
112 
113 // An archived record of object sub-graphs reachable from static
114 // fields within _k's mirror. The record is reloaded from the archive
115 // at runtime.
116 class ArchivedKlassSubGraphInfoRecord {
117  private:
118   Klass* _k;
119   bool _is_full_module_graph;
120   bool _has_non_early_klasses;
121 
122   // contains pairs of field offset and value for each subgraph entry field
123   Array<int>* _entry_field_records;
124 
125   // klasses of objects in archived sub-graphs referenced from the entry points
126   // (static fields) in the containing class
127   Array<Klass*>* _subgraph_object_klasses;
128  public:
129   ArchivedKlassSubGraphInfoRecord() :
130     _k(nullptr), _entry_field_records(nullptr), _subgraph_object_klasses(nullptr) {}
131   void init(KlassSubGraphInfo* info);
132   Klass* klass() const { return _k; }
133   Array<int>* entry_field_records() const { return _entry_field_records; }
134   Array<Klass*>* subgraph_object_klasses() const { return _subgraph_object_klasses; }
135   bool is_full_module_graph() const { return _is_full_module_graph; }
136   bool has_non_early_klasses() const { return _has_non_early_klasses; }
137 };
138 #endif // INCLUDE_CDS_JAVA_HEAP
139 
140 struct LoadedArchiveHeapRegion;
141 
// HeapShared is the top-level facility for writing Java heap objects into the
// CDS archive at dump time and for accessing/initializing them at run time.
// All state and functions are static (AllStatic); the bulk of the class is
// compiled only when INCLUDE_CDS_JAVA_HEAP is enabled.
class HeapShared: AllStatic {
  friend class VerifySharedOopClosure;

public:
  // Can this VM write a heap region into the CDS archive? Currently only {G1|Parallel|Serial}+compressed_cp
  static bool can_write() {
    CDS_JAVA_HEAP_ONLY(
      if (_disable_writing) {
        return false;
      }
      return (UseG1GC || UseParallelGC || UseSerialGC) && UseCompressedClassPointers;
    )
    NOT_CDS_JAVA_HEAP(return false;)
  }

  // Once called, can_write() permanently returns false for this VM.
  static void disable_writing() {
    CDS_JAVA_HEAP_ONLY(_disable_writing = true;)
  }

  static bool is_subgraph_root_class(InstanceKlass* ik);

  // Scratch objects for archiving Klass::java_mirror()
  static oop scratch_java_mirror(BasicType t) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static oop scratch_java_mirror(Klass* k)    NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static bool is_archived_boot_layer_available(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN_(false);

private:
#if INCLUDE_CDS_JAVA_HEAP
  // Set by disable_writing(); checked first by can_write().
  static bool _disable_writing;
  // Dump-time table of interned strings selected for archiving
  // (populated via add_to_dumped_interned_strings()).
  static DumpedInternedStrings *_dumped_interned_strings;

  // statistics
  constexpr static int ALLOC_STAT_SLOTS = 16;
  static size_t _alloc_count[ALLOC_STAT_SLOTS];
  static size_t _alloc_size[ALLOC_STAT_SLOTS];
  static size_t _total_obj_count;
  static size_t _total_obj_size; // in HeapWords

  static void count_allocation(size_t size);
  static void print_stats();
public:
  static unsigned oop_hash(oop const& p);
  static unsigned string_oop_hash(oop const& string) {
    return java_lang_String::hash_code(string);
  }

  // Per-object bookkeeping kept for each heap object that is being archived.
  class CachedOopInfo {
    // Used by CDSHeapVerifier.
    oop _orig_referrer;

    // The location of this object inside ArchiveHeapWriter::_buffer
    size_t _buffer_offset;

    // One or more fields in this object are pointing to non-null oops.
    bool _has_oop_pointers;

    // One or more fields in this object are pointing to MetaspaceObj
    bool _has_native_pointers;
  public:
    CachedOopInfo(oop orig_referrer, bool has_oop_pointers)
      : _orig_referrer(orig_referrer),
        _buffer_offset(0),
        _has_oop_pointers(has_oop_pointers),
        _has_native_pointers(false) {}
    oop orig_referrer()             const { return _orig_referrer;   }
    void set_buffer_offset(size_t offset) { _buffer_offset = offset; }
    size_t buffer_offset()          const { return _buffer_offset;   }
    bool has_oop_pointers()         const { return _has_oop_pointers; }
    bool has_native_pointers()      const { return _has_native_pointers; }
    void set_has_native_pointers()        { _has_native_pointers = true; }
  };

private:
  static const int INITIAL_TABLE_SIZE = 15889; // prime number
  static const int MAX_TABLE_SIZE     = 1000000;
  // Maps each original heap object to its CachedOopInfo (dump time only).
  typedef ResizeableResourceHashtable<oop, CachedOopInfo,
      AnyObj::C_HEAP,
      mtClassShared,
      HeapShared::oop_hash> ArchivedObjectCache;
  static ArchivedObjectCache* _archived_object_cache;

  // Dump-time mapping from a Klass to the sub-graph info recorded for it.
  class DumpTimeKlassSubGraphInfoTable
    : public ResourceHashtable<Klass*, KlassSubGraphInfo,
                               137, // prime number
                               AnyObj::C_HEAP,
                               mtClassShared,
                               DumpTimeSharedClassTable_hash> {
  public:
    int _count;
  };

public: // solaris compiler wants this for RunTimeKlassSubGraphInfoTable
  inline static bool record_equals_compact_hashtable_entry(
       const ArchivedKlassSubGraphInfoRecord* value, const Klass* key, int len_unused) {
    return (value->klass() == key);
  }

private:
  // Run-time (read-only, archived) counterpart of DumpTimeKlassSubGraphInfoTable.
  typedef OffsetCompactHashtable<
    const Klass*,
    const ArchivedKlassSubGraphInfoRecord*,
    record_equals_compact_hashtable_entry
    > RunTimeKlassSubGraphInfoTable;

  static DumpTimeKlassSubGraphInfoTable* _dump_time_subgraph_info_table;
  static RunTimeKlassSubGraphInfoTable _run_time_subgraph_info_table;

  static CachedOopInfo make_cached_oop_info(oop obj);
  static void archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
                                       bool is_full_module_graph);

  // Archive object sub-graph starting from the given static field
  // in Klass k's mirror.
  static void archive_reachable_objects_from_static_field(
    InstanceKlass* k, const char* klass_name,
    int field_offset, const char* field_name);

  static void verify_subgraph_from_static_field(
    InstanceKlass* k, int field_offset) PRODUCT_RETURN;
  static void verify_reachable_objects_from(oop obj) PRODUCT_RETURN;
  static void verify_subgraph_from(oop orig_obj) PRODUCT_RETURN;
  static void check_default_subgraph_classes();

  static KlassSubGraphInfo* init_subgraph_info(Klass *k, bool is_full_module_graph);
  static KlassSubGraphInfo* get_subgraph_info(Klass *k);

  static void init_subgraph_entry_fields(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_subgraph_entry_fields(ArchivableStaticFieldInfo fields[], TRAPS);

  // UseCompressedOops only: Used by decode_from_archive
  static address _narrow_oop_base;
  static int     _narrow_oop_shift;

  // !UseCompressedOops only: used to relocate pointers to the archived objects
  static ptrdiff_t _runtime_delta;

  // Set of oops already visited during one round of sub-graph recording;
  // see has_been_seen_during_subgraph_recording().
  typedef ResizeableResourceHashtable<oop, bool,
      AnyObj::C_HEAP,
      mtClassShared,
      HeapShared::oop_hash> SeenObjectsTable;

  static SeenObjectsTable *_seen_objects_table;

  // The "default subgraph" is the root of all archived objects that do not belong to any
  // of the classes defined in the <xxx>_archive_subgraph_entry_fields[] arrays:
  //    - interned strings
  //    - Klass::java_mirror()
  //    - ConstantPool::resolved_references()
  static KlassSubGraphInfo* _default_subgraph_info;

  // Dump-time root list built by append_root(); see the long comment on
  // HeapShared::roots() further below.
  static GrowableArrayCHeap<oop, mtClassShared>* _pending_roots;
  // Handles to the objArrayOop segments that hold the archived roots at run time.
  static GrowableArrayCHeap<OopHandle, mtClassShared>* _root_segments;
  // Maximum number of elements per root segment (see get_segment_indexes()).
  static int _root_segment_max_size_elems;
  // One scratch mirror per basic type (indexed by BasicType, up to T_VOID).
  static OopHandle _scratch_basic_type_mirrors[T_VOID+1];
  static MetaspaceObjToOopHandleTable* _scratch_java_mirror_table;
  static MetaspaceObjToOopHandleTable* _scratch_references_table;

  static void init_seen_objects_table() {
    assert(_seen_objects_table == nullptr, "must be");
    _seen_objects_table = new (mtClass)SeenObjectsTable(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
  }
  static void delete_seen_objects_table() {
    assert(_seen_objects_table != nullptr, "must be");
    delete _seen_objects_table;
    _seen_objects_table = nullptr;
  }

  // Statistics (for one round of start_recording_subgraph ... done_recording_subgraph)
  static int _num_new_walked_objs;
  static int _num_new_archived_objs;
  static int _num_old_recorded_klasses;

  // Statistics (for all archived subgraphs)
  static int _num_total_subgraph_recordings;
  static int _num_total_walked_objs;
  static int _num_total_archived_objs;
  static int _num_total_recorded_klasses;
  static int _num_total_verifications;

  static void start_recording_subgraph(InstanceKlass *k, const char* klass_name,
                                       bool is_full_module_graph);
  static void done_recording_subgraph(InstanceKlass *k, const char* klass_name);

  static bool has_been_seen_during_subgraph_recording(oop obj);
  static void set_has_been_seen_during_subgraph_recording(oop obj);
  static bool archive_object(oop obj);

  static void copy_interned_strings();

  static void resolve_classes_for_subgraphs(JavaThread* current, ArchivableStaticFieldInfo fields[]);
  static void resolve_classes_for_subgraph_of(JavaThread* current, Klass* k);
  static void clear_archived_roots_of(Klass* k);
  static const ArchivedKlassSubGraphInfoRecord*
               resolve_or_init_classes_for_subgraph_of(Klass* k, bool do_init, TRAPS);
  static void resolve_or_init(Klass* k, bool do_init, TRAPS);
  static void init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record);

  // Run-time loading of archived heap regions (the "loaded" as opposed to
  // "mapped" code path — NOTE(review): inferred from naming; confirm in .cpp).
  static int init_loaded_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
                                 MemRegion& archive_space);
  static void sort_loaded_regions(LoadedArchiveHeapRegion* loaded_regions, int num_loaded_regions,
                                  uintptr_t buffer);
  static bool load_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
                           int num_loaded_regions, uintptr_t buffer);
  static void init_loaded_heap_relocation(LoadedArchiveHeapRegion* reloc_info,
                                          int num_loaded_regions);
  static void fill_failed_loaded_region();
  static void mark_native_pointers(oop orig_obj);
  static bool has_been_archived(oop orig_obj);
  static void archive_java_mirrors();
  static void archive_strings();
 public:
  static void reset_archived_object_states(TRAPS);
  static void create_archived_object_cache() {
    _archived_object_cache =
      new (mtClass)ArchivedObjectCache(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
  }
  static void destroy_archived_object_cache() {
    delete _archived_object_cache;
    _archived_object_cache = nullptr;
  }
  static ArchivedObjectCache* archived_object_cache() {
    return _archived_object_cache;
  }

  static int archive_exception_instance(oop exception);
  static void archive_objects(ArchiveHeapInfo* heap_info);
  static void copy_objects();
  static void copy_special_objects();

  static bool archive_reachable_objects_from(int level,
                                             KlassSubGraphInfo* subgraph_info,
                                             oop orig_obj);

  static void add_to_dumped_interned_strings(oop string);

  // Scratch objects for archiving Klass::java_mirror()
  static void set_scratch_java_mirror(Klass* k, oop mirror);
  static void remove_scratch_objects(Klass* k);
  static void get_pointer_info(oop src_obj, bool& has_oop_pointers, bool& has_native_pointers);
  static void set_has_native_pointers(oop src_obj);

  // We use the HeapShared::roots() array to make sure that objects stored in the
  // archived heap region are not prematurely collected. These roots include:
  //
  //    - mirrors of classes that have not yet been loaded.
  //    - ConstantPool::resolved_references() of classes that have not yet been loaded.
  //    - ArchivedKlassSubGraphInfoRecords that have not been initialized
  //    - java.lang.Module objects that have not yet been added to the module graph
  //
  // When a mirror M becomes referenced by a newly loaded class K, M will be removed
  // from HeapShared::roots() via clear_root(), and K will be responsible for
  // keeping M alive.
  //
  // Other types of roots are also cleared similarly when they become referenced.

  // Dump-time only. Returns the index of the root, which can be used at run time to read
  // the root using get_root(index, ...).
  static int append_root(oop obj);
  static GrowableArrayCHeap<oop, mtClassShared>* pending_roots() { return _pending_roots; }

  // Dump-time and runtime
  static objArrayOop root_segment(int segment_idx);
  static oop get_root(int index, bool clear=false);

  // Run-time only
  static void clear_root(int index);

  // Translates a flat root index into (segment, index-within-segment).
  static void get_segment_indexes(int index, int& segment_index, int& internal_index);

  static void setup_test_class(const char* test_class_name) PRODUCT_RETURN;
#endif // INCLUDE_CDS_JAVA_HEAP

 public:
  static objArrayOop scratch_resolved_references(ConstantPool* src);
  static void add_scratch_resolved_references(ConstantPool* src, objArrayOop dest) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_scratch_objects(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  // True iff idx denotes the heap region of the CDS archive.
  static bool is_heap_region(int idx) {
    CDS_JAVA_HEAP_ONLY(return (idx == MetaspaceShared::hp);)
    NOT_CDS_JAVA_HEAP_RETURN_(false);
  }

  static void resolve_classes(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN;
  static void initialize_from_archived_subgraph(JavaThread* current, Klass* k) NOT_CDS_JAVA_HEAP_RETURN;

  static void init_for_dumping(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void write_subgraph_info_table() NOT_CDS_JAVA_HEAP_RETURN;
  static void add_root_segment(objArrayOop segment_oop) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_root_segment_sizes(int max_size_elems) NOT_CDS_JAVA_HEAP_RETURN;
  static void serialize_tables(SerializeClosure* soc) NOT_CDS_JAVA_HEAP_RETURN;

#ifndef PRODUCT
  static bool is_a_test_class_in_unnamed_module(Klass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
#endif
};
436 
437 #if INCLUDE_CDS_JAVA_HEAP
438 class DumpedInternedStrings :
439   public ResizeableResourceHashtable<oop, bool,
440                            AnyObj::C_HEAP,
441                            mtClassShared,
442                            HeapShared::string_oop_hash>
443 {
444 public:
445   DumpedInternedStrings(unsigned size, unsigned max_size) :
446     ResizeableResourceHashtable<oop, bool,
447                                 AnyObj::C_HEAP,
448                                 mtClassShared,
449                                 HeapShared::string_oop_hash>(size, max_size) {}
450 };
451 #endif
452 
453 #endif // SHARE_CDS_HEAPSHARED_HPP