/*
 * Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_CDS_HEAPSHARED_HPP
#define SHARE_CDS_HEAPSHARED_HPP

#include "cds/aotMetaspace.hpp"
#include "cds/cds_globals.hpp"
#include "cds/dumpTimeClassInfo.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "gc/shared/gc_globals.hpp"
#include "memory/allocation.hpp"
#include "memory/allStatic.hpp"
#include "oops/compressedOops.hpp"
#include "oops/oop.hpp"
#include "oops/oopHandle.hpp"
#include "oops/oopsHierarchy.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/hashTable.hpp"

#if INCLUDE_CDS_JAVA_HEAP
class DumpedInternedStrings;
class FileMapInfo;
class KlassSubGraphInfo;
class MetaspaceObjToOopHandleTable;
class ResourceBitMap;

struct ArchivableStaticFieldInfo;
class ArchiveHeapInfo;

#define ARCHIVED_BOOT_LAYER_CLASS "jdk/internal/module/ArchivedBootLayer"
#define ARCHIVED_BOOT_LAYER_FIELD "archivedBootLayer"

// Dump-time sub-graph info for Klass _k. It records the entry points
// (static fields in _k's mirror) of the archived sub-graphs reachable
// from _k's mirror, as well as the Klasses of the objects within those
// sub-graphs.
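//
// A rough dump-time usage sketch (illustrative only; the real flow lives in
// heapShared.cpp and may differ in detail):
//
//   KlassSubGraphInfo* info = init_subgraph_info(ik, /*is_full_module_graph=*/false);
//   oop root = ik->java_mirror()->obj_field(field_offset);   // entry-point static field
//   archive_reachable_objects_from(/*level=*/1, info, root); // walk and archive the sub-graph
//   info->add_subgraph_entry_field(field_offset, root);      // remember the (offset, value) pair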
class KlassSubGraphInfo: public CHeapObj<mtClass> {
 private:
  // The class that contains the static field(s) as the entry point(s)
  // of archived object sub-graph(s).
  Klass* _k;
  // A list of classes that need to be loaded and initialized before the archived
  // object sub-graphs can be accessed at runtime.
  GrowableArray<Klass*>* _subgraph_object_klasses;
  // A list of _k's static fields as the entry points of archived sub-graphs.
  // Each entry field is recorded as a (field_offset, field_value) tuple.
  GrowableArray<int>* _subgraph_entry_fields;

  // Does this KlassSubGraphInfo belong to the archived full module graph?
  bool _is_full_module_graph;

  // Does this KlassSubGraphInfo reference any classes that were loaded while
  // JvmtiExport::is_early_phase() != true? If so, this KlassSubGraphInfo cannot be
  // used at runtime if JVMTI ClassFileLoadHook is enabled.
  bool _has_non_early_klasses;
  static bool is_non_early_klass(Klass* k);
  static void check_allowed_klass(InstanceKlass* ik);
 public:
  KlassSubGraphInfo(Klass* k, bool is_full_module_graph) :
    _k(k), _subgraph_object_klasses(nullptr),
    _subgraph_entry_fields(nullptr),
    _is_full_module_graph(is_full_module_graph),
    _has_non_early_klasses(false) {}

  ~KlassSubGraphInfo() {
    if (_subgraph_object_klasses != nullptr) {
      delete _subgraph_object_klasses;
    }
    if (_subgraph_entry_fields != nullptr) {
      delete _subgraph_entry_fields;
    }
  }

  Klass* klass() { return _k; }
  GrowableArray<Klass*>* subgraph_object_klasses() {
    return _subgraph_object_klasses;
  }
  GrowableArray<int>* subgraph_entry_fields() {
    return _subgraph_entry_fields;
  }
  void add_subgraph_entry_field(int static_field_offset, oop v);
  void add_subgraph_object_klass(Klass *orig_k);
  int num_subgraph_object_klasses() {
    return _subgraph_object_klasses == nullptr ? 0 :
           _subgraph_object_klasses->length();
  }
  bool is_full_module_graph() const { return _is_full_module_graph; }
  bool has_non_early_klasses() const { return _has_non_early_klasses; }
};

// An archived record of object sub-graphs reachable from static
// fields within _k's mirror. The record is reloaded from the archive
// at runtime.
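//
// At runtime the record's entry fields are re-installed into _k's mirror.
// Illustrative sketch only (see HeapShared::init_archived_fields_for() for
// the actual logic):
//
//   Array<int>* pairs = record->entry_field_records();
//   for (int i = 0; pairs != nullptr && i < pairs->length(); i += 2) {
//     int field_offset = pairs->at(i);
//     int root_index   = pairs->at(i + 1);   // index into the archived heap roots
//     k->java_mirror()->obj_field_put(field_offset, HeapShared::get_root(root_index));
//   }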
class ArchivedKlassSubGraphInfoRecord {
 private:
  Klass* _k;
  bool _is_full_module_graph;
  bool _has_non_early_klasses;

  // contains pairs of field offset and value for each subgraph entry field
  Array<int>* _entry_field_records;

  // klasses of objects in archived sub-graphs referenced from the entry points
  // (static fields) in the containing class
  Array<Klass*>* _subgraph_object_klasses;
 public:
  ArchivedKlassSubGraphInfoRecord() :
    _k(nullptr), _entry_field_records(nullptr), _subgraph_object_klasses(nullptr) {}
  void init(KlassSubGraphInfo* info);
  Klass* klass() const { return _k; }
  Array<int>* entry_field_records() const { return _entry_field_records; }
  Array<Klass*>* subgraph_object_klasses() const { return _subgraph_object_klasses; }
  bool is_full_module_graph() const { return _is_full_module_graph; }
  bool has_non_early_klasses() const { return _has_non_early_klasses; }
};
#endif // INCLUDE_CDS_JAVA_HEAP

struct LoadedArchiveHeapRegion;

class HeapShared: AllStatic {
  friend class VerifySharedOopClosure;

 public:
  static bool is_subgraph_root_class(InstanceKlass* ik);

  // Scratch objects for archiving Klass::java_mirror()
  static oop scratch_java_mirror(BasicType t) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static oop scratch_java_mirror(Klass* k) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static oop scratch_java_mirror(oop java_mirror) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
  static bool is_archived_boot_layer_available(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN_(false);

 private:
#if INCLUDE_CDS_JAVA_HEAP
  static DumpedInternedStrings *_dumped_interned_strings;

  // statistics
  constexpr static int ALLOC_STAT_SLOTS = 16;
  static size_t _alloc_count[ALLOC_STAT_SLOTS];
  static size_t _alloc_size[ALLOC_STAT_SLOTS];
  static size_t _total_obj_count;
  static size_t _total_obj_size; // in HeapWords

  static void count_allocation(size_t size);
  static void print_stats();
 public:
  static void debug_trace();
  static unsigned oop_hash(oop const& p);
  static unsigned oop_handle_hash(OopHandle const& oh);
  static unsigned oop_handle_hash_raw(OopHandle const& oh);
  static bool oop_handle_equals(const OopHandle& a, const OopHandle& b);
  static unsigned string_oop_hash(oop const& string) {
    return java_lang_String::hash_code(string);
  }

  class CopyKlassSubGraphInfoToArchive;

  class CachedOopInfo {
    // Used by CDSHeapVerifier.
    OopHandle _orig_referrer;

    // The location of this object inside ArchiveHeapWriter::_buffer
    size_t _buffer_offset;

    // One or more fields in this object are pointing to non-null oops.
    bool _has_oop_pointers;

    // One or more fields in this object are pointing to MetaspaceObj
    bool _has_native_pointers;
   public:
    CachedOopInfo(OopHandle orig_referrer, bool has_oop_pointers)
      : _orig_referrer(orig_referrer),
        _buffer_offset(0),
        _has_oop_pointers(has_oop_pointers),
        _has_native_pointers(false) {}
    oop orig_referrer() const;
    void set_buffer_offset(size_t offset) { _buffer_offset = offset; }
    size_t buffer_offset() const { return _buffer_offset; }
    bool has_oop_pointers() const { return _has_oop_pointers; }
    bool has_native_pointers() const { return _has_native_pointers; }
    void set_has_native_pointers() { _has_native_pointers = true; }
  };

 private:
  static const int INITIAL_TABLE_SIZE = 15889; // prime number
  static const int MAX_TABLE_SIZE = 1000000;
  typedef ResizeableHashTable<OopHandle, CachedOopInfo,
      AnyObj::C_HEAP,
      mtClassShared,
      HeapShared::oop_handle_hash_raw,
      HeapShared::oop_handle_equals> ArchivedObjectCache;
  static ArchivedObjectCache* _archived_object_cache;

  class DumpTimeKlassSubGraphInfoTable
    : public HashTable<Klass*, KlassSubGraphInfo,
                       137, // prime number
                       AnyObj::C_HEAP,
                       mtClassShared,
                       DumpTimeSharedClassTable_hash> {
   public:
    int _count;
  };

 public: // solaris compiler wants this for RunTimeKlassSubGraphInfoTable
  inline static bool record_equals_compact_hashtable_entry(
      const ArchivedKlassSubGraphInfoRecord* value, const Klass* key, int len_unused) {
    return (value->klass() == key);
  }

 private:
  typedef OffsetCompactHashtable<
    const Klass*,
    const ArchivedKlassSubGraphInfoRecord*,
    record_equals_compact_hashtable_entry
    > RunTimeKlassSubGraphInfoTable;

  static DumpTimeKlassSubGraphInfoTable* _dump_time_subgraph_info_table;
  static RunTimeKlassSubGraphInfoTable _run_time_subgraph_info_table;

  static CachedOopInfo make_cached_oop_info(oop obj, oop referrer);
  static ArchivedKlassSubGraphInfoRecord* archive_subgraph_info(KlassSubGraphInfo* info);
  static void archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
                                       bool is_full_module_graph);

  // Archive object sub-graph starting from the given static field
  // in Klass k's mirror.
  static void archive_reachable_objects_from_static_field(
      InstanceKlass* k, const char* klass_name,
      int field_offset, const char* field_name);

  static void verify_subgraph_from_static_field(
      InstanceKlass* k, int field_offset) PRODUCT_RETURN;
  static void verify_reachable_objects_from(oop obj) PRODUCT_RETURN;
  static void verify_subgraph_from(oop orig_obj) PRODUCT_RETURN;
  static void check_special_subgraph_classes();

  static KlassSubGraphInfo* init_subgraph_info(Klass *k, bool is_full_module_graph);
  static KlassSubGraphInfo* get_subgraph_info(Klass *k);

  static void init_subgraph_entry_fields(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_subgraph_entry_fields(ArchivableStaticFieldInfo fields[], TRAPS);

  // UseCompressedOops only: Used by decode_from_archive
  static address _narrow_oop_base;
  static int _narrow_oop_shift;

  // !UseCompressedOops only: used to relocate pointers to the archived objects
  static ptrdiff_t _runtime_delta;
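  //
  // Illustrative sketch of the two relocation modes above (the actual decoding
  // is done when the archived heap is mapped or loaded; details may differ):
  //
  //   UseCompressedOops:   oop o = cast_to_oop(_narrow_oop_base +
  //                                            ((uintptr_t)narrow_value << _narrow_oop_shift));
  //   !UseCompressedOops:  oop o = cast_to_oop((address)dumptime_value + _runtime_delta);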

  typedef ResizeableHashTable<oop, bool,
      AnyObj::C_HEAP,
      mtClassShared,
      HeapShared::oop_hash> SeenObjectsTable;

  static SeenObjectsTable *_seen_objects_table;
  // The "special subgraph" contains all the archived objects that are reachable
  // from the following roots:
  //    - interned strings
  //    - Klass::java_mirror() -- including aot-initialized mirrors such as those of Enum klasses.
  //    - ConstantPool::resolved_references()
  //    - Universe::<xxx>_exception_instance()
  static KlassSubGraphInfo* _dump_time_special_subgraph; // for collecting info during dump time
  static ArchivedKlassSubGraphInfoRecord* _run_time_special_subgraph; // for initializing classes during run time.

  static GrowableArrayCHeap<OopHandle, mtClassShared>* _pending_roots;
  static GrowableArrayCHeap<OopHandle, mtClassShared>* _root_segments;
  static GrowableArrayCHeap<const char*, mtClassShared>* _context; // for debugging unarchivable objects
  static int _root_segment_max_size_elems;
  static OopHandle _scratch_basic_type_mirrors[T_VOID+1];
  static MetaspaceObjToOopHandleTable* _scratch_objects_table;

  static void init_seen_objects_table() {
    assert(_seen_objects_table == nullptr, "must be");
    _seen_objects_table = new (mtClass)SeenObjectsTable(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
  }
  static void delete_seen_objects_table() {
    assert(_seen_objects_table != nullptr, "must be");
    delete _seen_objects_table;
    _seen_objects_table = nullptr;
  }

  class ContextMark;

  // Statistics (for one round of start_recording_subgraph ... done_recording_subgraph)
  static int _num_new_walked_objs;
  static int _num_new_archived_objs;
  static int _num_old_recorded_klasses;

  // Statistics (for all archived subgraphs)
  static int _num_total_subgraph_recordings;
  static int _num_total_walked_objs;
  static int _num_total_archived_objs;
  static int _num_total_recorded_klasses;
  static int _num_total_verifications;

  static void start_recording_subgraph(InstanceKlass *k, const char* klass_name,
                                       bool is_full_module_graph);
  static void done_recording_subgraph(InstanceKlass *k, const char* klass_name);

  static bool has_been_seen_during_subgraph_recording(oop obj);
  static void set_has_been_seen_during_subgraph_recording(oop obj);
  static bool archive_object(oop obj, oop referrer, KlassSubGraphInfo* subgraph_info);

  static void resolve_classes_for_subgraphs(JavaThread* current, ArchivableStaticFieldInfo fields[]);
  static void resolve_classes_for_subgraph_of(JavaThread* current, Klass* k);
  static void clear_archived_roots_of(Klass* k);
  static const ArchivedKlassSubGraphInfoRecord*
               resolve_or_init_classes_for_subgraph_of(Klass* k, bool do_init, TRAPS);
  static void resolve_or_init(const char* klass_name, bool do_init, TRAPS);
  static void resolve_or_init(Klass* k, bool do_init, TRAPS);
  static void init_archived_fields_for(Klass* k, const ArchivedKlassSubGraphInfoRecord* record);

  static int init_loaded_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
                                 MemRegion& archive_space);
  static void sort_loaded_regions(LoadedArchiveHeapRegion* loaded_regions, int num_loaded_regions,
                                  uintptr_t buffer);
  static bool load_regions(FileMapInfo* mapinfo, LoadedArchiveHeapRegion* loaded_regions,
                           int num_loaded_regions, uintptr_t buffer);
  static void init_loaded_heap_relocation(LoadedArchiveHeapRegion* reloc_info,
                                          int num_loaded_regions);
  static void fill_failed_loaded_region();
  static void mark_native_pointers(oop orig_obj);
  static bool has_been_archived(oop orig_obj);
  static void prepare_resolved_references();
  static void archive_strings();
  static void archive_subgraphs();
  static void copy_java_mirror(oop orig_mirror, oop scratch_m);

  // PendingOop and PendingOopStack are used to discover all cacheable heap
  // objects. The traversal is driven by an explicit PendingOopStack (rather
  // than C recursion) so that deep reference chains cannot overflow the C stack.
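  //
  // A minimal sketch of the iterative walk (the real implementation is
  // archive_reachable_objects_from() / walk_one_object() in heapShared.cpp):
  //
  //   PendingOopStack stack;
  //   stack.push(PendingOop(root, /*referrer=*/nullptr, /*level=*/1));
  //   while (stack.length() > 0) {
  //     PendingOop p = stack.pop();
  //     // archive p.obj() if not seen yet, then push its not-yet-seen oop
  //     // fields onto the stack instead of recursing into them
  //   }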
  class PendingOop {
    oop _obj;
    oop _referrer;
    int _level;

   public:
    PendingOop() : _obj(nullptr), _referrer(nullptr), _level(-1) {}
    PendingOop(oop obj, oop referrer, int level) : _obj(obj), _referrer(referrer), _level(level) {}

    oop obj() const { return _obj; }
    oop referrer() const { return _referrer; }
    int level() const { return _level; }
  };

  class OopFieldPusher;
  using PendingOopStack = GrowableArrayCHeap<PendingOop, mtClassShared>;

  static PendingOop _object_being_archived;
  static bool walk_one_object(PendingOopStack* stack, int level, KlassSubGraphInfo* subgraph_info,
                              oop orig_obj, oop referrer);

 public:
  static void exit_on_error();
  static void reset_archived_object_states(TRAPS);
  static void create_archived_object_cache() {
    _archived_object_cache =
      new (mtClass)ArchivedObjectCache(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE);
  }
  static void destroy_archived_object_cache() {
    delete _archived_object_cache;
    _archived_object_cache = nullptr;
  }
  static ArchivedObjectCache* archived_object_cache() {
    return _archived_object_cache;
  }

  static CachedOopInfo* get_cached_oop_info(oop orig_obj) {
    OopHandle oh(&orig_obj);
    return _archived_object_cache->get(oh);
  }

  static int archive_exception_instance(oop exception);

  static bool archive_reachable_objects_from(int level,
                                             KlassSubGraphInfo* subgraph_info,
                                             oop orig_obj);

  static void add_to_dumped_interned_strings(oop string);
  static bool is_dumped_interned_string(oop o);

  static void track_scratch_object(oop orig_obj, oop scratch_obj);

  // Scratch objects for archiving Klass::java_mirror()
  static void set_scratch_java_mirror(Klass* k, oop mirror);
  static void remove_scratch_objects(Klass* k);
  static void get_pointer_info(oop src_obj, bool& has_oop_pointers, bool& has_native_pointers);
  static void set_has_native_pointers(oop src_obj);

  // We use the HeapShared::roots() array to make sure that objects stored in the
  // archived heap region are not prematurely collected. These roots include:
  //
  //    - mirrors of classes that have not yet been loaded.
  //    - ConstantPool::resolved_references() of classes that have not yet been loaded.
  //    - ArchivedKlassSubGraphInfoRecords that have not been initialized
  //    - java.lang.Module objects that have not yet been added to the module graph
  //
  // When a mirror M becomes referenced by a newly loaded class K, M will be removed
  // from HeapShared::roots() via clear_root(), and K will be responsible for
  // keeping M alive.
  //
  // Other types of roots are also cleared similarly when they become referenced.

  // Dump-time only. Returns the index of the root, which can be used at run time to read
  // the root using get_root(index, ...).
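  //
  // Illustrative pairing of the root APIs (sketch only):
  //
  //   int idx = HeapShared::append_root(obj);   // dump time: keep obj alive, remember its index
  //   ...
  //   oop o = HeapShared::get_root(idx);        // run time: read it back
  //   HeapShared::clear_root(idx);              // run time: once another GC root keeps o alive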
  static int append_root(oop obj);
  static GrowableArrayCHeap<OopHandle, mtClassShared>* pending_roots() { return _pending_roots; }

  // Dump-time and runtime
  static objArrayOop root_segment(int segment_idx);
  static oop get_root(int index, bool clear=false);

  // Run-time only
  static void clear_root(int index);
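  // Maps a flat root index to (segment_index, internal_index). Presumably
  //   segment_index  = index / _root_segment_max_size_elems
  //   internal_index = index % _root_segment_max_size_elems
  // (sketch only; see the definition in heapShared.cpp).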
  static void get_segment_indexes(int index, int& segment_index, int& internal_index);
  static void setup_test_class(const char* test_class_name) PRODUCT_RETURN;
#endif // INCLUDE_CDS_JAVA_HEAP

 public:
  static oop orig_to_scratch_object(oop orig_obj);
  static void write_heap(ArchiveHeapInfo* heap_info) NOT_CDS_JAVA_HEAP_RETURN;
  static objArrayOop scratch_resolved_references(ConstantPool* src);
  static void add_scratch_resolved_references(ConstantPool* src, objArrayOop dest) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_dumping() NOT_CDS_JAVA_HEAP_RETURN;
  static void init_scratch_objects_for_basic_type_mirrors(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_box_classes(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static bool is_heap_region(int idx) {
    CDS_JAVA_HEAP_ONLY(return (idx == AOTMetaspace::hp);)
    NOT_CDS_JAVA_HEAP_RETURN_(false);
  }
  static void delete_tables_with_raw_oops() NOT_CDS_JAVA_HEAP_RETURN;

  static void resolve_classes(JavaThread* current) NOT_CDS_JAVA_HEAP_RETURN;
  static void initialize_from_archived_subgraph(JavaThread* current, Klass* k) NOT_CDS_JAVA_HEAP_RETURN;

  static void init_for_dumping(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void write_subgraph_info_table() NOT_CDS_JAVA_HEAP_RETURN;
  static void add_root_segment(objArrayOop segment_oop) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_root_segment_sizes(int max_size_elems) NOT_CDS_JAVA_HEAP_RETURN;
  static void serialize_tables(SerializeClosure* soc) NOT_CDS_JAVA_HEAP_RETURN;

#ifndef PRODUCT
  static bool is_a_test_class_in_unnamed_module(Klass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static void initialize_test_class_from_archive(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
#endif

  static void add_to_permanent_oop_table(oop obj, int offset);

  // AOT-compile time only: get a stable index for an archived object.
  // Returns 0 if obj is not archived.
  static int get_archived_object_permanent_index(oop obj) NOT_CDS_JAVA_HEAP_RETURN_(-1);
  // Runtime only: get back the same object for an index returned by
  // get_archived_object_permanent_index().
  static oop get_archived_object(int permanent_index) NOT_CDS_JAVA_HEAP_RETURN_(nullptr);
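  //
  // Illustrative pairing of the two calls above (sketch only):
  //
  //   int perm = HeapShared::get_archived_object_permanent_index(obj); // AOT-compile time
  //   ...
  //   oop same = HeapShared::get_archived_object(perm);                // run time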

  static void initialize_java_lang_invoke(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;
  static void init_classes_for_special_subgraph(Handle loader, TRAPS) NOT_CDS_JAVA_HEAP_RETURN;

  static bool is_core_java_lang_invoke_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static bool is_lambda_form_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static bool is_lambda_proxy_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static bool is_string_concat_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);
  static bool is_archivable_hidden_klass(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN_(false);

  // Used by AOTArtifactFinder
  static void start_scanning_for_oops();
  static void end_scanning_for_oops();
  static void scan_java_class(Klass* k);
  static void scan_java_mirror(oop orig_mirror);
  static void copy_and_rescan_aot_inited_mirror(InstanceKlass* ik);
};

class CachedCodeDirectoryInternal {
  int _permanent_oop_count;
  int* _permanent_oop_offsets; // offset of each permanent object from the bottom of the archived heap
 public:
  void dumptime_init_internal() NOT_CDS_JAVA_HEAP_RETURN;
  void runtime_init_internal() NOT_CDS_JAVA_HEAP_RETURN;
};

#if INCLUDE_CDS_JAVA_HEAP
class DumpedInternedStrings :
  public ResizeableHashTable<oop, bool,
                             AnyObj::C_HEAP,
                             mtClassShared,
                             HeapShared::string_oop_hash>
{
 public:
  DumpedInternedStrings(unsigned size, unsigned max_size) :
    ResizeableHashTable<oop, bool,
                        AnyObj::C_HEAP,
                        mtClassShared,
                        HeapShared::string_oop_hash>(size, max_size) {}
};
#endif

#endif // SHARE_CDS_HEAPSHARED_HPP