8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_CDS_HEAPSHARED_HPP
26 #define SHARE_CDS_HEAPSHARED_HPP
27
28 #include "cds/dumpTimeClassInfo.hpp"
29 #include "cds/metaspaceShared.hpp"
30 #include "classfile/compactHashtable.hpp"
31 #include "classfile/javaClasses.hpp"
32 #include "gc/shared/gc_globals.hpp"
33 #include "memory/allocation.hpp"
34 #include "memory/allStatic.hpp"
35 #include "oops/compressedOops.hpp"
36 #include "oops/oop.hpp"
37 #include "oops/oopHandle.hpp"
38 #include "oops/oopsHierarchy.hpp"
39 #include "utilities/growableArray.hpp"
40 #include "utilities/resourceHash.hpp"
41
42 #if INCLUDE_CDS_JAVA_HEAP
43 class DumpedInternedStrings;
44 class FileMapInfo;
45 class KlassSubGraphInfo;
46 class MetaspaceObjToOopHandleTable;
47 class ResourceBitMap;
144
145 public:
146 // Can this VM write a heap region into the CDS archive? Currently only G1+compressed{oops,cp}
147 static bool can_write() {
148 CDS_JAVA_HEAP_ONLY(
149 if (_disable_writing) {   // writing was explicitly turned off via disable_writing()
150 return false;
151 }
152 return (UseG1GC && UseCompressedClassPointers);   // NOTE(review): comment above says compressed{oops,cp}, but only class pointers are checked here — confirm oops requirement was dropped
153 )
154 NOT_CDS_JAVA_HEAP(return false;)   // builds without CDS java-heap support can never write a heap region
155 }
156
157 static void disable_writing() {   // Permanently disable heap-region writing; can_write() returns false afterwards
158 CDS_JAVA_HEAP_ONLY(_disable_writing = true;)
159 }
160
161 static bool is_subgraph_root_class(InstanceKlass* ik);
162
163 // Scratch objects for archiving Klass::java_mirror()
164 static oop scratch_java_mirror(BasicType t) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
165 static oop scratch_java_mirror(Klass* k) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
166 static bool is_archived_boot_layer_available(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN_(false);
167
168 private:
169 #if INCLUDE_CDS_JAVA_HEAP
170 static bool _disable_writing;
171 static DumpedInternedStrings *_dumped_interned_strings;
172
173 // statistics
174 constexpr static int ALLOC_STAT_SLOTS = 16;
175 static size_t _alloc_count[ALLOC_STAT_SLOTS];
176 static size_t _alloc_size[ALLOC_STAT_SLOTS];
177 static size_t _total_obj_count;
178 static size_t _total_obj_size; // in HeapWords
179
180 static void count_allocation(size_t size);
181 static void print_stats();
182 public:
183 static unsigned oop_hash(oop const& p);
184 static unsigned string_oop_hash(oop const& string) {   // Hash for interned-string tables; uses the Java-level String hash
185 return java_lang_String::hash_code(string);
186 }
187
188 class CachedOopInfo {
189 // Used by CDSHeapVerifier.
190 oop _orig_referrer;
191
192 // The location of this object inside ArchiveHeapWriter::_buffer
193 size_t _buffer_offset;
194
195 // One or more fields in this object are pointing to non-null oops.
196 bool _has_oop_pointers;
197
198 // One or more fields in this object are pointing to MetaspaceObj
199 bool _has_native_pointers;
200 public:
201 CachedOopInfo(oop orig_referrer, bool has_oop_pointers)
202 : _orig_referrer(orig_referrer),
203 _buffer_offset(0),
204 _has_oop_pointers(has_oop_pointers),
205 _has_native_pointers(false) {}
206 oop orig_referrer() const { return _orig_referrer; }
207 void set_buffer_offset(size_t offset) { _buffer_offset = offset; }
233 int _count;
234 };
235
236 public: // solaris compiler wants this for RunTimeKlassSubGraphInfoTable
// Equality callback used by RunTimeKlassSubGraphInfoTable lookups:
// a record matches iff it describes exactly the Klass being looked up.
237 inline static bool record_equals_compact_hashtable_entry(
238 const ArchivedKlassSubGraphInfoRecord* value, const Klass* key, int len_unused) {   // len_unused: required by the compact-hashtable callback signature only
239 return (value->klass() == key);
240 }
241
242 private:
243 typedef OffsetCompactHashtable<
244 const Klass*,
245 const ArchivedKlassSubGraphInfoRecord*,
246 record_equals_compact_hashtable_entry
247 > RunTimeKlassSubGraphInfoTable;
248
249 static DumpTimeKlassSubGraphInfoTable* _dump_time_subgraph_info_table;
250 static RunTimeKlassSubGraphInfoTable _run_time_subgraph_info_table;
251
252 static CachedOopInfo make_cached_oop_info(oop obj);
253 static void archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
254 bool is_full_module_graph);
255
256 // Archive object sub-graph starting from the given static field
257 // in Klass k's mirror.
258 static void archive_reachable_objects_from_static_field(
259 InstanceKlass* k, const char* klass_name,
260 int field_offset, const char* field_name);
261
262 static void verify_subgraph_from_static_field(
263 InstanceKlass* k, int field_offset) PRODUCT_RETURN;
264 static void verify_reachable_objects_from(oop obj) PRODUCT_RETURN;
265 static void verify_subgraph_from(oop orig_obj) PRODUCT_RETURN;
266 static void check_default_subgraph_classes();
267
268 static KlassSubGraphInfo* init_subgraph_info(Klass *k, bool is_full_module_graph);
269 static KlassSubGraphInfo* get_subgraph_info(Klass *k);
270
271 static void init_subgraph_entry_fields(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
272 static void init_subgraph_entry_fields(ArchivableStaticFieldInfo fields[], TRAPS);
274 // UseCompressedOops only: Used by decode_from_archive
275 static address _narrow_oop_base;
276 static int _narrow_oop_shift;
277
278 // !UseCompressedOops only: used to relocate pointers to the archived objects
279 static ptrdiff_t _runtime_delta;
280
281 typedef ResizeableResourceHashtable<oop, bool,
282 AnyObj::C_HEAP,
283 mtClassShared,
284 HeapShared::oop_hash> SeenObjectsTable;
285
286 static SeenObjectsTable *_seen_objects_table;
287
288 // The "default subgraph" is the root of all archived objects that do not belong to any
289 // of the classes defined in the <xxx>_archive_subgraph_entry_fields[] arrays:
290 // - interned strings
291 // - Klass::java_mirror()
292 // - ConstantPool::resolved_references()
293 static KlassSubGraphInfo* _default_subgraph_info;
294
295 static GrowableArrayCHeap<oop, mtClassShared>* _pending_roots;
296 static OopHandle _roots;
297 static OopHandle _scratch_basic_type_mirrors[T_VOID+1];
298 static MetaspaceObjToOopHandleTable* _scratch_java_mirror_table;
299 static MetaspaceObjToOopHandleTable* _scratch_references_table;
300
301 static void init_seen_objects_table() {   // Create the table that tracks oops already visited during one subgraph walk
302 assert(_seen_objects_table == nullptr, "must be");
303 _seen_objects_table = new (mtClass)SeenObjectsTable(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
304 }
305 static void delete_seen_objects_table() {   // Tear down the visited-object table once the walk is done
306 assert(_seen_objects_table != nullptr, "must be");
307 delete _seen_objects_table;
308 _seen_objects_table = nullptr;   // reset so the next init_seen_objects_table() assert holds
309 }
310
311 // Statistics (for one round of start_recording_subgraph ... done_recording_subgraph)
312 static int _num_new_walked_objs;
313 static int _num_new_archived_objs;
314 static int _num_old_recorded_klasses;
315
316 // Statistics (for all archived subgraphs)
317 static int _num_total_subgraph_recordings;
318 static int _num_total_walked_objs;
319 static int _num_total_archived_objs;
320 static int _num_total_recorded_klasses;
321 static int _num_total_verifications;
322
323 static void start_recording_subgraph(InstanceKlass *k, const char* klass_name,
324 bool is_full_module_graph);
325 static void done_recording_subgraph(InstanceKlass *k, const char* klass_name);
326
327 static bool has_been_seen_during_subgraph_recording(oop obj);
328 static void set_has_been_seen_during_subgraph_recording(oop obj);
329 static bool archive_object(oop obj);
330
331 static void copy_interned_strings();
332
333 static void resolve_classes_for_subgraphs(JavaThread* current, ArchivableStaticFieldInfo fields[]);
334 static void resolve_classes_for_subgraph_of(JavaThread* current, Klass* k);
335 static void clear_archived_roots_of(Klass* k);
336 static const ArchivedKlassSubGraphInfoRecord*
337 resolve_or_init_classes_for_subgraph_of(Klass* k, bool do_init, TRAPS);
338 static void resolve_or_init(Klass* k, bool do_init, TRAPS);
339 static void init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record);
340
341 static int init_loaded_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
342 MemRegion& archive_space);
343 static void sort_loaded_regions(LoadedArchiveHeapRegion* loaded_regions, int num_loaded_regions,
344 uintptr_t buffer);
345 static bool load_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
346 int num_loaded_regions, uintptr_t buffer);
347 static void init_loaded_heap_relocation(LoadedArchiveHeapRegion* reloc_info,
348 int num_loaded_regions);
349 static void fill_failed_loaded_region();
350 static void mark_native_pointers(oop orig_obj);
351 static bool has_been_archived(oop orig_obj);
352 static void archive_java_mirrors();
353 static void archive_strings();
354 public:
355 static void reset_archived_object_states(TRAPS);
356 static void create_archived_object_cache() {   // Allocate the dump-time cache of archived objects
357 _archived_object_cache =
358 new (mtClass)ArchivedObjectCache(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
359 }
360 static void destroy_archived_object_cache() {   // Free the cache created by create_archived_object_cache()
361 delete _archived_object_cache;
362 _archived_object_cache = nullptr;   // null out so a stale pointer is never observed
363 }
364 static ArchivedObjectCache* archived_object_cache() {   // Accessor; nullptr after destroy_archived_object_cache()
365 return _archived_object_cache;
366 }
367
368 static int archive_exception_instance(oop exception);
369 static void archive_objects(ArchiveHeapInfo* heap_info);
370 static void copy_objects();
371 static void copy_special_objects();
372
373 static bool archive_reachable_objects_from(int level,
374 KlassSubGraphInfo* subgraph_info,
375 oop orig_obj);
376
377 static ResourceBitMap calculate_oopmap(MemRegion region); // marks all the oop pointers
378 static void add_to_dumped_interned_strings(oop string);
379
380 // Scratch objects for archiving Klass::java_mirror()
381 static void set_scratch_java_mirror(Klass* k, oop mirror);
382 static void remove_scratch_objects(Klass* k);
383 static void get_pointer_info(oop src_obj, bool& has_oop_pointers, bool& has_native_pointers);
384 static void set_has_native_pointers(oop src_obj);
385
386 // We use the HeapShared::roots() array to make sure that objects stored in the
387 // archived heap region are not prematurely collected. These roots include:
388 //
389 // - mirrors of classes that have not yet been loaded.
390 // - ConstantPool::resolved_references() of classes that have not yet been loaded.
391 // - ArchivedKlassSubGraphInfoRecords that have not been initialized
392 // - java.lang.Module objects that have not yet been added to the module graph
393 //
394 // When a mirror M becomes referenced by a newly loaded class K, M will be removed
395 // from HeapShared::roots() via clear_root(), and K will be responsible for
396 // keeping M alive.
397 //
398 // Other types of roots are also cleared similarly when they become referenced.
399
400 // Dump-time only. Returns the index of the root, which can be used at run time to read
401 // the root using get_root(index, ...).
402 static int append_root(oop obj);
403 static GrowableArrayCHeap<oop, mtClassShared>* pending_roots() { return _pending_roots; }
404
405 // Dump-time and runtime
406 static objArrayOop roots();
407 static oop get_root(int index, bool clear=false);
408
409 // Run-time only
410 static void clear_root(int index);
411
412 static void setup_test_class(const char* test_class_name) PRODUCT_RETURN;
413 #endif // INCLUDE_CDS_JAVA_HEAP
414
415 public:
416 static objArrayOop scratch_resolved_references(ConstantPool* src);
417 static void add_scratch_resolved_references(ConstantPool* src, objArrayOop dest) NOT_CDS_JAVA_HEAP_RETURN;
418 static void init_scratch_objects(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
419 static bool is_heap_region(int idx) {   // True iff idx denotes the CDS heap region (MetaspaceShared::hp)
420 CDS_JAVA_HEAP_ONLY(return (idx == MetaspaceShared::hp);)
421 NOT_CDS_JAVA_HEAP_RETURN_(false);   // no heap region exists without CDS java-heap support
422 }
423
424 static void resolve_classes(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN;
425 static void initialize_from_archived_subgraph(JavaThread* current, Klass* k) NOT_CDS_JAVA_HEAP_RETURN;
426
427 static void init_for_dumping(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
428 static void write_subgraph_info_table() NOT_CDS_JAVA_HEAP_RETURN;
429 static void init_roots(oop roots_oop) NOT_CDS_JAVA_HEAP_RETURN;
430 static void serialize_tables(SerializeClosure* soc) NOT_CDS_JAVA_HEAP_RETURN;
431 static bool initialize_enum_klass(InstanceKlass* k, TRAPS) NOT_CDS_JAVA_HEAP_RETURN_(false);
432
433 static bool is_a_test_class_in_unnamed_module(Klass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
434 };
435
436 #if INCLUDE_CDS_JAVA_HEAP
// Table of interned strings collected for archiving, hashed with the
// Java-level String hash (HeapShared::string_oop_hash) so hashing agrees
// with java.lang.String semantics. The bool payload appears unused —
// presumably the table serves as a set; confirm against callers.
437 class DumpedInternedStrings :
438 public ResizeableResourceHashtable<oop, bool,
439 AnyObj::C_HEAP,
440 mtClassShared,
441 HeapShared::string_oop_hash>
442 {
443 public:
444 DumpedInternedStrings(unsigned size, unsigned max_size) :
445 ResizeableResourceHashtable<oop, bool,
446 AnyObj::C_HEAP,
447 mtClassShared,
448 HeapShared::string_oop_hash>(size, max_size) {}
449 };
450 #endif
451
452 #endif // SHARE_CDS_HEAPSHARED_HPP
|
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_CDS_HEAPSHARED_HPP
26 #define SHARE_CDS_HEAPSHARED_HPP
27
28 #include "cds/cds_globals.hpp"
29 #include "cds/dumpTimeClassInfo.hpp"
30 #include "cds/metaspaceShared.hpp"
31 #include "classfile/compactHashtable.hpp"
32 #include "classfile/javaClasses.hpp"
33 #include "gc/shared/gc_globals.hpp"
34 #include "memory/allocation.hpp"
35 #include "memory/allStatic.hpp"
36 #include "oops/compressedOops.hpp"
37 #include "oops/oop.hpp"
38 #include "oops/oopHandle.hpp"
39 #include "oops/oopsHierarchy.hpp"
40 #include "utilities/growableArray.hpp"
41 #include "utilities/resourceHash.hpp"
42
43 #if INCLUDE_CDS_JAVA_HEAP
44 class DumpedInternedStrings;
45 class FileMapInfo;
46 class KlassSubGraphInfo;
47 class MetaspaceObjToOopHandleTable;
48 class ResourceBitMap;
145
146 public:
147 // Can this VM write a heap region into the CDS archive? Currently only G1+compressed{oops,cp}
148 static bool can_write() {
149 CDS_JAVA_HEAP_ONLY(
150 if (_disable_writing) {   // writing was explicitly turned off via disable_writing()
151 return false;
152 }
153 return (UseG1GC && UseCompressedClassPointers);   // NOTE(review): comment above says compressed{oops,cp}, but only class pointers are checked here — confirm oops requirement was dropped
154 )
155 NOT_CDS_JAVA_HEAP(return false;)   // builds without CDS java-heap support can never write a heap region
156 }
157
158 static void disable_writing() {   // Permanently disable heap-region writing; can_write() returns false afterwards
159 CDS_JAVA_HEAP_ONLY(_disable_writing = true;)
160 }
161
162 static bool is_subgraph_root_class(InstanceKlass* ik);
163
164 // Scratch objects for archiving Klass::java_mirror()
165 static oop scratch_java_mirror(BasicType t) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
166 static oop scratch_java_mirror(Klass* k) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
167 static oop scratch_java_mirror(oop java_mirror) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
168 static bool is_archived_boot_layer_available(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN_(false);
169
170 private:
171 #if INCLUDE_CDS_JAVA_HEAP
172 static bool _disable_writing;
173 static DumpedInternedStrings *_dumped_interned_strings;
174
175 // statistics
176 constexpr static int ALLOC_STAT_SLOTS = 16;
177 static size_t _alloc_count[ALLOC_STAT_SLOTS];
178 static size_t _alloc_size[ALLOC_STAT_SLOTS];
179 static size_t _total_obj_count;
180 static size_t _total_obj_size; // in HeapWords
181
182 static void count_allocation(size_t size);
183 static void print_stats();
184 public:
185 static unsigned oop_hash(oop const& p);
186 static unsigned string_oop_hash(oop const& string) {   // Hash for interned-string tables; uses the Java-level String hash
187 return java_lang_String::hash_code(string);
188 }
189
190 class CopyKlassSubGraphInfoToArchive;
191
192 class CachedOopInfo {
193 // Used by CDSHeapVerifier.
194 oop _orig_referrer;
195
196 // The location of this object inside ArchiveHeapWriter::_buffer
197 size_t _buffer_offset;
198
199 // One or more fields in this object are pointing to non-null oops.
200 bool _has_oop_pointers;
201
202 // One or more fields in this object are pointing to MetaspaceObj
203 bool _has_native_pointers;
204 public:
205 CachedOopInfo(oop orig_referrer, bool has_oop_pointers)
206 : _orig_referrer(orig_referrer),
207 _buffer_offset(0),
208 _has_oop_pointers(has_oop_pointers),
209 _has_native_pointers(false) {}
210 oop orig_referrer() const { return _orig_referrer; }
211 void set_buffer_offset(size_t offset) { _buffer_offset = offset; }
237 int _count;
238 };
239
240 public: // solaris compiler wants this for RunTimeKlassSubGraphInfoTable
// Equality callback used by RunTimeKlassSubGraphInfoTable lookups:
// a record matches iff it describes exactly the Klass being looked up.
241 inline static bool record_equals_compact_hashtable_entry(
242 const ArchivedKlassSubGraphInfoRecord* value, const Klass* key, int len_unused) {   // len_unused: required by the compact-hashtable callback signature only
243 return (value->klass() == key);
244 }
245
246 private:
247 typedef OffsetCompactHashtable<
248 const Klass*,
249 const ArchivedKlassSubGraphInfoRecord*,
250 record_equals_compact_hashtable_entry
251 > RunTimeKlassSubGraphInfoTable;
252
253 static DumpTimeKlassSubGraphInfoTable* _dump_time_subgraph_info_table;
254 static RunTimeKlassSubGraphInfoTable _run_time_subgraph_info_table;
255
256 static CachedOopInfo make_cached_oop_info(oop obj);
257 static ArchivedKlassSubGraphInfoRecord* archive_subgraph_info(KlassSubGraphInfo* info);
258 static void archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
259 bool is_full_module_graph);
260
261 // Archive object sub-graph starting from the given static field
262 // in Klass k's mirror.
263 static void archive_reachable_objects_from_static_field(
264 InstanceKlass* k, const char* klass_name,
265 int field_offset, const char* field_name);
266
267 static void verify_subgraph_from_static_field(
268 InstanceKlass* k, int field_offset) PRODUCT_RETURN;
269 static void verify_reachable_objects_from(oop obj) PRODUCT_RETURN;
270 static void verify_subgraph_from(oop orig_obj) PRODUCT_RETURN;
271 static void check_default_subgraph_classes();
272
273 static KlassSubGraphInfo* init_subgraph_info(Klass *k, bool is_full_module_graph);
274 static KlassSubGraphInfo* get_subgraph_info(Klass *k);
275
276 static void init_subgraph_entry_fields(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
277 static void init_subgraph_entry_fields(ArchivableStaticFieldInfo fields[], TRAPS);
279 // UseCompressedOops only: Used by decode_from_archive
280 static address _narrow_oop_base;
281 static int _narrow_oop_shift;
282
283 // !UseCompressedOops only: used to relocate pointers to the archived objects
284 static ptrdiff_t _runtime_delta;
285
286 typedef ResizeableResourceHashtable<oop, bool,
287 AnyObj::C_HEAP,
288 mtClassShared,
289 HeapShared::oop_hash> SeenObjectsTable;
290
291 static SeenObjectsTable *_seen_objects_table;
292
293 // The "default subgraph" is the root of all archived objects that do not belong to any
294 // of the classes defined in the <xxx>_archive_subgraph_entry_fields[] arrays:
295 // - interned strings
296 // - Klass::java_mirror()
297 // - ConstantPool::resolved_references()
298 static KlassSubGraphInfo* _default_subgraph_info;
299 static ArchivedKlassSubGraphInfoRecord* _runtime_default_subgraph_info;
300
301 static GrowableArrayCHeap<oop, mtClassShared>* _pending_roots;
302 static GrowableArrayCHeap<oop, mtClassShared>* _trace; // for debugging unarchivable objects
303 static GrowableArrayCHeap<const char*, mtClassShared>* _context; // for debugging unarchivable objects
304 static OopHandle _roots;
305 static int _permobj_segments;
306 static OopHandle _scratch_basic_type_mirrors[T_VOID+1];
307 static MetaspaceObjToOopHandleTable* _scratch_java_mirror_table;
308 static MetaspaceObjToOopHandleTable* _scratch_references_table;
309
310 static void init_seen_objects_table() {   // Create the table that tracks oops already visited during one subgraph walk
311 assert(_seen_objects_table == nullptr, "must be");
312 _seen_objects_table = new (mtClass)SeenObjectsTable(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
313 }
314 static void delete_seen_objects_table() {   // Tear down the visited-object table once the walk is done
315 assert(_seen_objects_table != nullptr, "must be");
316 delete _seen_objects_table;
317 _seen_objects_table = nullptr;   // reset so the next init_seen_objects_table() assert holds
318 }
319
320 class ArchivingObjectMark;
321 class ContextMark;
322
323 // Statistics (for one round of start_recording_subgraph ... done_recording_subgraph)
324 static int _num_new_walked_objs;
325 static int _num_new_archived_objs;
326 static int _num_old_recorded_klasses;
327
328 // Statistics (for all archived subgraphs)
329 static int _num_total_subgraph_recordings;
330 static int _num_total_walked_objs;
331 static int _num_total_archived_objs;
332 static int _num_total_recorded_klasses;
333 static int _num_total_verifications;
334
335 static void start_recording_subgraph(InstanceKlass *k, const char* klass_name,
336 bool is_full_module_graph);
337 static void done_recording_subgraph(InstanceKlass *k, const char* klass_name);
338
339 static bool has_been_seen_during_subgraph_recording(oop obj);
340 static void set_has_been_seen_during_subgraph_recording(oop obj);
341 static bool archive_object(oop obj);
342 static void copy_preinitialized_mirror(Klass* orig_k, oop orig_mirror, oop m);
343 static void copy_interned_strings();
344
345 static void resolve_classes_for_subgraphs(JavaThread* current, ArchivableStaticFieldInfo fields[]);
346 static void resolve_classes_for_subgraph_of(JavaThread* current, Klass* k);
347 static void clear_archived_roots_of(Klass* k);
348 static const ArchivedKlassSubGraphInfoRecord*
349 resolve_or_init_classes_for_subgraph_of(Klass* k, bool do_init, TRAPS);
350 static void resolve_or_init(const char* klass_name, bool do_init, TRAPS);
351 static void resolve_or_init(Klass* k, bool do_init, TRAPS);
352 static void init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record);
353
354 static int init_loaded_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
355 MemRegion& archive_space);
356 static void sort_loaded_regions(LoadedArchiveHeapRegion* loaded_regions, int num_loaded_regions,
357 uintptr_t buffer);
358 static bool load_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
359 int num_loaded_regions, uintptr_t buffer);
360 static void init_loaded_heap_relocation(LoadedArchiveHeapRegion* reloc_info,
361 int num_loaded_regions);
362 static void fill_failed_loaded_region();
363 static void mark_native_pointers(oop orig_obj);
364 static bool has_been_archived(oop orig_obj);
365 static bool can_mirror_be_used_in_subgraph(oop orig_java_mirror);
366 static void archive_java_mirrors();
367 static void archive_strings();
368 static void exit_on_error();
369 public:
370 static void reset_archived_object_states(TRAPS);
371 static void create_archived_object_cache() {   // Allocate the dump-time cache of archived objects
372 _archived_object_cache =
373 new (mtClass)ArchivedObjectCache(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
374 }
375 static void destroy_archived_object_cache() {   // Free the cache created by create_archived_object_cache()
376 delete _archived_object_cache;
377 _archived_object_cache = nullptr;   // null out so a stale pointer is never observed
378 }
379 static ArchivedObjectCache* archived_object_cache() {   // Accessor; nullptr after destroy_archived_object_cache()
380 return _archived_object_cache;
381 }
382
383 static int archive_exception_instance(oop exception);
384 static void archive_objects(ArchiveHeapInfo* heap_info);
385 static void copy_objects();
386 static void copy_special_objects();
387
388 static bool archive_reachable_objects_from(int level,
389 KlassSubGraphInfo* subgraph_info,
390 oop orig_obj);
391
392 static ResourceBitMap calculate_oopmap(MemRegion region); // marks all the oop pointers
393 static void add_to_dumped_interned_strings(oop string);
394
395 static void track_scratch_object(oop orig_obj, oop scratch_obj);
396
397 // Scratch objects for archiving Klass::java_mirror()
398 static void set_scratch_java_mirror(Klass* k, oop mirror);
399 static void remove_scratch_objects(Klass* k);
400 static void get_pointer_info(oop src_obj, bool& has_oop_pointers, bool& has_native_pointers);
401 static void set_has_native_pointers(oop src_obj);
402
403 // We use the HeapShared::roots() array to make sure that objects stored in the
404 // archived heap region are not prematurely collected. These roots include:
405 //
406 // - mirrors of classes that have not yet been loaded.
407 // - ConstantPool::resolved_references() of classes that have not yet been loaded.
408 // - ArchivedKlassSubGraphInfoRecords that have not been initialized
409 // - java.lang.Module objects that have not yet been added to the module graph
410 //
411 // When a mirror M becomes referenced by a newly loaded class K, M will be removed
412 // from HeapShared::roots() via clear_root(), and K will be responsible for
413 // keeping M alive.
414 //
415 // Other types of roots are also cleared similarly when they become referenced.
416
417 // Dump-time only. Returns the index of the root, which can be used at run time to read
418 // the root using get_root(index, ...).
419 static int append_root(oop obj);
420 static GrowableArrayCHeap<oop, mtClassShared>* pending_roots() { return _pending_roots; }
421
422 // Dump-time and runtime
423 static objArrayOop roots();
424 static oop get_root(int index, bool clear=false);
425
426 // Run-time only
427 static void clear_root(int index);
428 static void set_permobj_segments(int n) { _permobj_segments = n; }
429 static void setup_test_class(const char* test_class_name) PRODUCT_RETURN;
430 #endif // INCLUDE_CDS_JAVA_HEAP
431
432 public:
433 static oop orig_to_scratch_object(oop orig_obj);
434 static objArrayOop scratch_resolved_references(ConstantPool* src);
435 static void add_scratch_resolved_references(ConstantPool* src, objArrayOop dest) NOT_CDS_JAVA_HEAP_RETURN;
436 static void init_scratch_objects(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
437 static void init_box_classes(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
438 static bool is_heap_region(int idx) {   // True iff idx denotes the CDS heap region (MetaspaceShared::hp)
439 CDS_JAVA_HEAP_ONLY(return (idx == MetaspaceShared::hp);)
440 NOT_CDS_JAVA_HEAP_RETURN_(false);   // no heap region exists without CDS java-heap support
441 }
442
443 static void resolve_classes(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN;
444 static void initialize_from_archived_subgraph(JavaThread* current, Klass* k) NOT_CDS_JAVA_HEAP_RETURN;
445
446 static void init_for_dumping(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
447 static void write_subgraph_info_table() NOT_CDS_JAVA_HEAP_RETURN;
448 static void serialize_misc_info(SerializeClosure* soc) NOT_CDS_JAVA_HEAP_RETURN;
449 static void init_roots(oop roots_oop) NOT_CDS_JAVA_HEAP_RETURN;
450 static void serialize_tables(SerializeClosure* soc) NOT_CDS_JAVA_HEAP_RETURN;
451
452 static bool is_a_test_class_in_unnamed_module(Klass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
453
454 static void add_to_permanent_index_table(oop obj, int index);
455 // AOT-compile time only: get a stable index for an archived object.
456 // Returns 0 if obj is not archived.
457 static int get_archived_object_permanent_index(oop obj) NOT_CDS_JAVA_HEAP_RETURN_(-1);
458 // Runtime only: get back the same object for an index returned by
459 // get_archived_object_permanent_index().
460 static oop get_archived_object(int permanent_index) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
461
462 static void initialize_java_lang_invoke(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
463 static void initialize_default_subgraph_classes(Handle loader, TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
464
465 static bool is_lambda_form_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
466 static bool is_lambda_proxy_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
467 static bool is_archivable_hidden_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
468 };
469
470 #if INCLUDE_CDS_JAVA_HEAP
// Table of interned strings collected for archiving, hashed with the
// Java-level String hash (HeapShared::string_oop_hash) so hashing agrees
// with java.lang.String semantics. The bool payload appears unused —
// presumably the table serves as a set; confirm against callers.
471 class DumpedInternedStrings :
472 public ResizeableResourceHashtable<oop, bool,
473 AnyObj::C_HEAP,
474 mtClassShared,
475 HeapShared::string_oop_hash>
476 {
477 public:
478 DumpedInternedStrings(unsigned size, unsigned max_size) :
479 ResizeableResourceHashtable<oop, bool,
480 AnyObj::C_HEAP,
481 mtClassShared,
482 HeapShared::string_oop_hash>(size, max_size) {}
483 };
484 #endif
485
486 #endif // SHARE_CDS_HEAPSHARED_HPP
|