 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_CDS_HEAPSHARED_HPP
#define SHARE_CDS_HEAPSHARED_HPP

#include "cds/cds_globals.hpp"
#include "cds/dumpTimeClassInfo.hpp"
#include "cds/metaspaceShared.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "gc/shared/gc_globals.hpp"
#include "memory/allocation.hpp"
#include "memory/allStatic.hpp"
#include "oops/compressedOops.hpp"
#include "oops/oop.hpp"
#include "oops/oopHandle.hpp"
#include "oops/oopsHierarchy.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/resourceHash.hpp"

#if INCLUDE_CDS_JAVA_HEAP
class DumpedInternedStrings;
class FileMapInfo;
class KlassSubGraphInfo;
class MetaspaceObjToOopHandleTable;
class ResourceBitMap;
  // (static fields) in the containing class
  Array<Klass*>* _subgraph_object_klasses;
 public:
  ArchivedKlassSubGraphInfoRecord() :
    _k(nullptr), _entry_field_records(nullptr), _subgraph_object_klasses(nullptr) {}
  void init(KlassSubGraphInfo* info);
  Klass* klass() const { return _k; }
  Array<int>* entry_field_records() const { return _entry_field_records; }
  Array<Klass*>* subgraph_object_klasses() const { return _subgraph_object_klasses; }
  bool is_full_module_graph() const { return _is_full_module_graph; }
  bool has_non_early_klasses() const { return _has_non_early_klasses; }
};
#endif // INCLUDE_CDS_JAVA_HEAP

struct LoadedArchiveHeapRegion;

class HeapShared: AllStatic {
  friend class VerifySharedOopClosure;

 public:
  // Can this VM write a heap region into the CDS archive? Currently only {G1|Parallel|Serial|Epsilon|Shenandoah}+compressed_cp
  static bool can_write() {
    CDS_JAVA_HEAP_ONLY(
      if (_disable_writing) {
        return false;
      }
      return (UseG1GC || UseParallelGC || UseSerialGC || UseEpsilonGC || UseShenandoahGC) && UseCompressedClassPointers;
    )
    NOT_CDS_JAVA_HEAP(return false;)
  }

  static void disable_writing() {
    CDS_JAVA_HEAP_ONLY(_disable_writing = true;)
  }

  static bool is_subgraph_root_class(InstanceKlass* ik);

  // Scratch objects for archiving Klass::java_mirror()
  static oop scratch_java_mirror(BasicType t) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static oop scratch_java_mirror(Klass* k) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static oop scratch_java_mirror(oop java_mirror) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static bool is_archived_boot_layer_available(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN_(false);

  static void start_finding_archivable_hidden_classes() NOT_CDS_JAVA_HEAP_RETURN;
  static void find_archivable_hidden_classes_in_object(oop o) NOT_CDS_JAVA_HEAP_RETURN;
  static void end_finding_archivable_hidden_classes() NOT_CDS_JAVA_HEAP_RETURN;

 private:
#if INCLUDE_CDS_JAVA_HEAP
  static bool _disable_writing;
  static DumpedInternedStrings *_dumped_interned_strings;

  // statistics
  constexpr static int ALLOC_STAT_SLOTS = 16;
  static size_t _alloc_count[ALLOC_STAT_SLOTS];
  static size_t _alloc_size[ALLOC_STAT_SLOTS];
  static size_t _total_obj_count;
  static size_t _total_obj_size; // in HeapWords

  static void count_allocation(size_t size);
  static void print_stats();
  static void debug_trace();
 public:
  static unsigned oop_hash(oop const& p);
  static unsigned string_oop_hash(oop const& string) {
    return java_lang_String::hash_code(string);
  }

  class CopyKlassSubGraphInfoToArchive;

  class CachedOopInfo {
    // Used by CDSHeapVerifier.
    oop _orig_referrer;

    // The location of this object inside ArchiveHeapWriter::_buffer
    size_t _buffer_offset;

    // One or more fields in this object are pointing to non-null oops.
    bool _has_oop_pointers;

    // One or more fields in this object are pointing to MetaspaceObjs.
    bool _has_native_pointers;
   public:
    CachedOopInfo(oop orig_referrer, bool has_oop_pointers)
      : _orig_referrer(orig_referrer),
        _buffer_offset(0),
        _has_oop_pointers(has_oop_pointers),
        _has_native_pointers(false) {}
    oop orig_referrer() const { return _orig_referrer; }
    void set_buffer_offset(size_t offset) { _buffer_offset = offset; }
   public:
    int _count;
  };

 public: // solaris compiler wants this for RunTimeKlassSubGraphInfoTable
  inline static bool record_equals_compact_hashtable_entry(
       const ArchivedKlassSubGraphInfoRecord* value, const Klass* key, int len_unused) {
    return (value->klass() == key);
  }

 private:
  typedef OffsetCompactHashtable<
    const Klass*,
    const ArchivedKlassSubGraphInfoRecord*,
    record_equals_compact_hashtable_entry
    > RunTimeKlassSubGraphInfoTable;
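
  // Illustrative sketch (not part of this header): at run time, a subgraph record is
  // expected to be found by probing _run_time_subgraph_info_table with the Klass* as key;
  // record_equals_compact_hashtable_entry() above is the equality predicate used by the
  // table. The hash helper named below is an assumption for illustration only:
  //
  //   unsigned int hash = SystemDictionaryShared::hash_for_shared_dictionary_quick(k);
  //   const ArchivedKlassSubGraphInfoRecord* record =
  //       _run_time_subgraph_info_table.lookup(k, hash, 0 /* len is unused */);
  //   if (record != nullptr) { /* initialize archived fields from the record */ }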

  static DumpTimeKlassSubGraphInfoTable* _dump_time_subgraph_info_table;
  static RunTimeKlassSubGraphInfoTable _run_time_subgraph_info_table;

  class FindHiddenClassesOopClosure;
  static void find_archivable_hidden_classes_helper(ArchivableStaticFieldInfo fields[]);

  static CachedOopInfo make_cached_oop_info(oop obj);
  static ArchivedKlassSubGraphInfoRecord* archive_subgraph_info(KlassSubGraphInfo* info);
  static void archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
                                       bool is_full_module_graph);

  // Archive object sub-graph starting from the given static field
  // in Klass k's mirror.
  static void archive_reachable_objects_from_static_field(
    InstanceKlass* k, const char* klass_name,
    int field_offset, const char* field_name);

  static void verify_subgraph_from_static_field(
    InstanceKlass* k, int field_offset) PRODUCT_RETURN;
  static void verify_reachable_objects_from(oop obj) PRODUCT_RETURN;
  static void verify_subgraph_from(oop orig_obj) PRODUCT_RETURN;
  static void check_default_subgraph_classes();

  static KlassSubGraphInfo* init_subgraph_info(Klass *k, bool is_full_module_graph);
  static KlassSubGraphInfo* get_subgraph_info(Klass *k);

  static void init_subgraph_entry_fields(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_subgraph_entry_fields(ArchivableStaticFieldInfo fields[], TRAPS);

  // UseCompressedOops only: Used by decode_from_archive
  static address _narrow_oop_base;
  static int _narrow_oop_shift;

  // !UseCompressedOops only: used to relocate pointers to the archived objects
  static ptrdiff_t _runtime_delta;

  typedef ResizeableResourceHashtable<oop, bool,
      AnyObj::C_HEAP,
      mtClassShared,
      HeapShared::oop_hash> SeenObjectsTable;

  static SeenObjectsTable *_seen_objects_table;

  // The "default subgraph" is the root of all archived objects that do not belong to any
  // of the classes defined in the <xxx>_archive_subgraph_entry_fields[] arrays:
  //   - interned strings
  //   - Klass::java_mirror()
  //   - ConstantPool::resolved_references()
  static KlassSubGraphInfo* _default_subgraph_info;
  static ArchivedKlassSubGraphInfoRecord* _runtime_default_subgraph_info;

  static GrowableArrayCHeap<OopHandle, mtClassShared>* _pending_roots;
  static GrowableArrayCHeap<oop, mtClassShared>* _trace; // for debugging unarchivable objects
  static GrowableArrayCHeap<const char*, mtClassShared>* _context; // for debugging unarchivable objects
  static GrowableArrayCHeap<OopHandle, mtClassShared>* _root_segments;
  static int _root_segment_max_size_elems;
  static OopHandle _scratch_basic_type_mirrors[T_VOID+1];
  static MetaspaceObjToOopHandleTable* _scratch_java_mirror_table;
  static MetaspaceObjToOopHandleTable* _scratch_references_table;

  static void init_seen_objects_table() {
    assert(_seen_objects_table == nullptr, "must be");
    _seen_objects_table = new (mtClass)SeenObjectsTable(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
  }
  static void delete_seen_objects_table() {
    assert(_seen_objects_table != nullptr, "must be");
    delete _seen_objects_table;
    _seen_objects_table = nullptr;
  }
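
  // Illustrative sketch (assumption, not a declaration in this header): each subgraph
  // recording is expected to bracket its traversal with the two helpers above, roughly:
  //
  //   init_seen_objects_table();
  //   archive_reachable_objects_from(1, subgraph_info, root_obj);  // records each oop at most once
  //   delete_seen_objects_table();
  //
  // so that objects reachable from several roots are walked only once per recording.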

  class ArchivingObjectMark;
  class ContextMark;

  // Statistics (for one round of start_recording_subgraph ... done_recording_subgraph)
  static int _num_new_walked_objs;
  static int _num_new_archived_objs;
  static int _num_old_recorded_klasses;

  // Statistics (for all archived subgraphs)
  static int _num_total_subgraph_recordings;
  static int _num_total_walked_objs;
  static int _num_total_archived_objs;
  static int _num_total_recorded_klasses;
  static int _num_total_verifications;

  static void start_recording_subgraph(InstanceKlass *k, const char* klass_name,
                                       bool is_full_module_graph);
  static void done_recording_subgraph(InstanceKlass *k, const char* klass_name);

  static bool has_been_seen_during_subgraph_recording(oop obj);
  static void set_has_been_seen_during_subgraph_recording(oop obj);
  static bool archive_object(oop obj);
  static void copy_aot_initialized_mirror(Klass* orig_k, oop orig_mirror, oop m);
  static void copy_interned_strings();

  static void resolve_classes_for_subgraphs(JavaThread* current, ArchivableStaticFieldInfo fields[]);
  static void resolve_classes_for_subgraph_of(JavaThread* current, Klass* k);
  static void clear_archived_roots_of(Klass* k);
  static const ArchivedKlassSubGraphInfoRecord*
               resolve_or_init_classes_for_subgraph_of(Klass* k, bool do_init, TRAPS);
  static void resolve_or_init(const char* klass_name, bool do_init, TRAPS);
  static void resolve_or_init(Klass* k, bool do_init, TRAPS);
  static void init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record);

  static int init_loaded_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
                                 MemRegion& archive_space);
  static void sort_loaded_regions(LoadedArchiveHeapRegion* loaded_regions, int num_loaded_regions,
                                  uintptr_t buffer);
  static bool load_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
                           int num_loaded_regions, uintptr_t buffer);
  static void init_loaded_heap_relocation(LoadedArchiveHeapRegion* reloc_info,
                                          int num_loaded_regions);
  static void fill_failed_loaded_region();
  static void mark_native_pointers(oop orig_obj);
  static bool has_been_archived(oop orig_obj);
  static bool can_mirror_be_used_in_subgraph(oop orig_java_mirror);
  static void archive_java_mirrors();
  static void archive_strings();
  static int get_archived_object_permanent_index_locked(oop obj);

 public:
  static void exit_on_error();
  static void reset_archived_object_states(TRAPS);
  static void create_archived_object_cache() {
    _archived_object_cache =
      new (mtClass)ArchivedObjectCache(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
  }
  static void destroy_archived_object_cache() {
    delete _archived_object_cache;
    _archived_object_cache = nullptr;
  }
  static ArchivedObjectCache* archived_object_cache() {
    return _archived_object_cache;
  }

  static int archive_exception_instance(oop exception);
  static void archive_objects(ArchiveHeapInfo* heap_info);
  static void copy_objects();
  static void copy_special_objects();

  static bool archive_reachable_objects_from(int level,
                                             KlassSubGraphInfo* subgraph_info,
                                             oop orig_obj);

  static void add_to_dumped_interned_strings(oop string);

  static void track_scratch_object(oop orig_obj, oop scratch_obj);

  // Scratch objects for archiving Klass::java_mirror()
  static void set_scratch_java_mirror(Klass* k, oop mirror);
  static void remove_scratch_objects(Klass* k);
  static void get_pointer_info(oop src_obj, bool& has_oop_pointers, bool& has_native_pointers);
  static void set_has_native_pointers(oop src_obj);
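
  // Illustrative sketch of the expected scratch-mirror pairing (assumption for illustration only):
  //
  //   HeapShared::set_scratch_java_mirror(k, scratch);   // dump time: register a scratch copy
  //   oop m = HeapShared::scratch_java_mirror(k);        // later: fetch it while archiving
  //   HeapShared::remove_scratch_objects(k);             // if k ends up excluded from the archive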

  // We use the HeapShared::roots() array to make sure that objects stored in the
  // archived heap region are not prematurely collected. These roots include:
  //
  //   - mirrors of classes that have not yet been loaded.
  //   - ConstantPool::resolved_references() of classes that have not yet been loaded.
  //   - ArchivedKlassSubGraphInfoRecords that have not been initialized
  //   - java.lang.Module objects that have not yet been added to the module graph
  //
  // When a mirror M becomes referenced by a newly loaded class K, M will be removed
  // from HeapShared::roots() via clear_root(), and K will be responsible for
  // keeping M alive.
  //
  // Other types of roots are also cleared similarly when they become referenced.

  // Dump-time only. Returns the index of the root, which can be used at run time to read
  // the root using get_root(index, ...).
  static int append_root(oop obj);
  static GrowableArrayCHeap<OopHandle, mtClassShared>* pending_roots() { return _pending_roots; }

  // Dump-time and runtime
  static objArrayOop root_segment(int segment_idx);
  static oop get_root(int index, bool clear=false);

  // Run-time only
  static void clear_root(int index);
  static void get_segment_indexes(int index, int& segment_index, int& internal_index);
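
  // Illustrative sketch of the root protocol described above (assumption, not defined here):
  //
  //   // Dump time: remember where the root went.
  //   int index = HeapShared::append_root(mirror);
  //
  //   // Run time: read the root back; clear it once the loaded class keeps it alive.
  //   oop m = HeapShared::get_root(index);
  //   HeapShared::clear_root(index);
  //
  // With segmented roots, get_segment_indexes(index, seg, off) is expected to map a global
  // index to an offset within root_segment(seg), bounded by _root_segment_max_size_elems.
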
  static void setup_test_class(const char* test_class_name) PRODUCT_RETURN;
#endif // INCLUDE_CDS_JAVA_HEAP

 public:
  static oop orig_to_scratch_object(oop orig_obj);
  static objArrayOop scratch_resolved_references(ConstantPool* src);
  static void add_scratch_resolved_references(ConstantPool* src, objArrayOop dest) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_scratch_objects(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_box_classes(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static bool is_heap_region(int idx) {
    CDS_JAVA_HEAP_ONLY(return (idx == MetaspaceShared::hp);)
    NOT_CDS_JAVA_HEAP_RETURN_(false);
  }

  static void resolve_classes(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN;
  static void initialize_from_archived_subgraph(JavaThread* current, Klass* k) NOT_CDS_JAVA_HEAP_RETURN;

  static void init_for_dumping(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void write_subgraph_info_table() NOT_CDS_JAVA_HEAP_RETURN;
  static void add_root_segment(objArrayOop segment_oop) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_root_segment_sizes(int max_size_elems) NOT_CDS_JAVA_HEAP_RETURN;
  static void serialize_tables(SerializeClosure* soc) NOT_CDS_JAVA_HEAP_RETURN;

#ifndef PRODUCT
  static bool is_a_test_class_in_unnamed_module(Klass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static void initialize_test_class_from_archive(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
#endif

  static void add_to_permanent_index_table(oop obj);

  // AOT-compile time only: get a stable index for an archived object.
  // Returns 0 if obj is not archived.
  static int get_archived_object_permanent_index(oop obj) NOT_CDS_JAVA_HEAP_RETURN_(-1);
  // Runtime only: get back the same object for an index returned by
  // get_archived_object_permanent_index().
  static oop get_archived_object(int permanent_index) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
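
  // Illustrative round trip (sketch; the variable names are assumptions):
  //
  //   // AOT-compile time:
  //   int perm = HeapShared::get_archived_object_permanent_index(obj);  // 0 if obj is not archived
  //
  //   // Run time, with the same archive mapped:
  //   oop same = HeapShared::get_archived_object(perm);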

  static void initialize_java_lang_invoke(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void initialize_default_subgraph_classes(Handle loader, TRAPS) NOT_CDS_JAVA_HEAP_RETURN;

  static bool is_lambda_form_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static bool is_lambda_proxy_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static bool is_archivable_hidden_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
};

#if INCLUDE_CDS_JAVA_HEAP
class DumpedInternedStrings :
  public ResizeableResourceHashtable<oop, bool,
                                     AnyObj::C_HEAP,
                                     mtClassShared,
                                     HeapShared::string_oop_hash>
{
public:
  DumpedInternedStrings(unsigned size, unsigned max_size) :
    ResizeableResourceHashtable<oop, bool,
                                AnyObj::C_HEAP,
                                mtClassShared,
                                HeapShared::string_oop_hash>(size, max_size) {}
};
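
// Illustrative sketch (assumption): during dumping, each interned string is expected to be
// recorded at most once, e.g.
//
//   bool created;
//   _dumped_interned_strings->put_if_absent(string, true, &created);
//   if (created) { /* first time this string was seen */ }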
#endif

#endif // SHARE_CDS_HEAPSHARED_HPP