// ---- SourceObjInfo state (initialized by the constructors below) ----
131 bool _read_only; // destined for the read-only region? (presumably routes to _ro_src_objs -- confirm in .cpp)
132 bool _has_embedded_pointer; // set false at construction; presumably flipped when an embedded pointer is recorded
133 FollowMode _follow_mode; // how the object is treated: make_a_copy, point_to_it, ... (enum declared above)
134 int _size_in_bytes; // size in bytes (ref->size() is in words, scaled by BytesPerWord)
135 int _id; // Each object has a unique serial ID, starting from zero. The ID is assigned
136 // when the object is added into _source_objs.
137 MetaspaceObj::Type _msotype;
138 address _source_addr; // The source object to be copied.
139 address _buffered_addr; // The copy of this object inside the buffer.
140 public:
141 SourceObjInfo(MetaspaceClosure::Ref* ref, bool read_only, FollowMode follow_mode) :
142 _ptrmap_start(0), _ptrmap_end(0), _read_only(read_only), _has_embedded_pointer(false), _follow_mode(follow_mode),
143 _size_in_bytes(ref->size() * BytesPerWord), _id(0), _msotype(ref->msotype()),
144 _source_addr(ref->obj()) {
145 if (follow_mode == point_to_it) {
146 _buffered_addr = ref->obj();
147 } else {
148 _buffered_addr = nullptr;
149 }
150 }
151
152 // This constructor is only used for regenerated objects (created by LambdaFormInvokers, etc).
153 // src = address of a Method or InstanceKlass that has been regenerated.
154 // renegerated_obj_info = info for the regenerated version of src.
155 SourceObjInfo(address src, SourceObjInfo* renegerated_obj_info) :
156 _ptrmap_start(0), _ptrmap_end(0), _read_only(false),
157 _follow_mode(renegerated_obj_info->_follow_mode),
158 _size_in_bytes(0), _msotype(renegerated_obj_info->_msotype),
159 _source_addr(src), _buffered_addr(renegerated_obj_info->_buffered_addr) {}
160
// True iff this object gets its own copy in the archive buffer
// (point_to_it objects are referenced in place instead).
161 bool should_copy() const { return _follow_mode == make_a_copy; }
// Records the address of the buffer copy. May be called exactly once,
// and only for objects in make_a_copy mode.
162 void set_buffered_addr(address addr) {
163 assert(should_copy(), "must be");
164 assert(_buffered_addr == nullptr, "cannot be copied twice");
165 assert(addr != nullptr, "must be a valid copy");
166 _buffered_addr = addr;
167 }
// Bit positions delimiting this object's range in the pointer bitmap.
168 void set_ptrmap_start(uintx v) { _ptrmap_start = v; }
169 void set_ptrmap_end(uintx v) { _ptrmap_end = v; }
170 uintx ptrmap_start() const { return _ptrmap_start; } // inclusive
// The collected objects, in insertion order.
197 GrowableArray<SourceObjInfo*>* objs() const { return _objs; }
198
// Adds one object to this list (defined in the .cpp file).
199 void append(SourceObjInfo* src_info);
// Records that pointing_obj contains an embedded pointer described by ref.
200 void remember_embedded_pointer(SourceObjInfo* pointing_obj, MetaspaceClosure::Ref* ref);
// Relocates the i-th object's embedded pointers via the builder.
201 void relocate(int i, ArchiveBuilder* builder);
202
203 // convenience accessor
204 SourceObjInfo* at(int i) const { return objs()->at(i); }
205 };
206
207 class CDSMapLogger;
208
// Sizing bounds for the resizable source-object hashtable below.
209 static const int INITIAL_TABLE_SIZE = 15889;
210 static const int MAX_TABLE_SIZE = 1000000;
211
// Reserved/committed memory -- presumably backs the dump buffer (confirm in .cpp).
212 ReservedSpace _shared_rs;
213 VirtualSpace _shared_vs;
214
215 DumpRegion _rw_region;
216 DumpRegion _ro_region;
217
218 // Combined bitmap to track pointers in both RW and RO regions. This is updated
219 // as objects are copied into RW and RO.
220 CHeapBitMap _ptrmap;
221
222 // _ptrmap is split into these two bitmaps which are written into the archive.
223 CHeapBitMap _rw_ptrmap; // marks pointers in the RW region
224 CHeapBitMap _ro_ptrmap; // marks pointers in the RO region
225
226 SourceObjList _rw_src_objs; // objs to put in rw region
227 SourceObjList _ro_src_objs; // objs to put in ro region
// Forward map: source object address -> its SourceObjInfo record.
228 ResizeableResourceHashtable<address, SourceObjInfo, AnyObj::C_HEAP, mtClassShared> _src_obj_table;
// Reverse map: buffered copy address -> original source address.
229 ResizeableResourceHashtable<address, address, AnyObj::C_HEAP, mtClassShared> _buffered_to_src_table;
230 GrowableArray<Klass*>* _klasses;
231 GrowableArray<Symbol*>* _symbols;
// Seed presumably consumed by entropy() (declared below) -- confirm in .cpp.
232 unsigned int _entropy_seed;
233
234 // statistics
235 DumpAllocStats _alloc_stats;
236 size_t _total_heap_region_size;
237
238 void print_region_stats(FileMapInfo *map_info, ArchiveHeapInfo* heap_info);
239 void print_bitmap_region_stats(size_t size, size_t total_size);
240 void print_heap_region_stats(ArchiveHeapInfo* heap_info, size_t total_size);
241
242 // For global access.
243 static ArchiveBuilder* _current;
244
245 public:
246 // Use this when you allocate space outside of ArchiveBuilder::dump_{rw,ro}_region.
247 // These are usually for misc tables that are allocated in the RO space.
248 class OtherROAllocMark {
// Top of the RO region at construction time; the out-of-line destructor
// presumably accounts for everything allocated past this point -- see .cpp.
249 char* _oldtop;
250 public:
251 OtherROAllocMark() {
252 _oldtop = _current->_ro_region.top();
253 }
254 ~OtherROAllocMark();
255 };
256
257 private:
// Decides how a discovered object is treated (copied, pointed-to, etc).
258 FollowMode get_follow_mode(MetaspaceClosure::Ref *ref);
259
260 void iterate_sorted_roots(MetaspaceClosure* it);
261 void sort_klasses();
// qsort-style comparators used when ordering _symbols / _klasses.
262 static int compare_symbols_by_address(Symbol** a, Symbol** b);
263 static int compare_klass_by_name(Klass** a, Klass** b);
264
// Shallow-copies the given source objects into the dump region; embedded
// pointers are presumably fixed up later by relocate_embedded_pointers().
265 void make_shallow_copies(DumpRegion *dump_region, const SourceObjList* src_objs);
266 void make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info);
267
268 void relocate_embedded_pointers(SourceObjList* src_objs);
269
270 bool is_excluded(Klass* k);
271 void clean_up_src_obj_table();
272
273 protected:
// Subclasses enumerate the root set of objects to be archived.
274 virtual void iterate_roots(MetaspaceClosure* it) = 0;
275
276 // Conservative estimate for number of bytes needed for:
277 size_t _estimated_metaspaceobj_bytes; // all archived MetaspaceObj's.
278 size_t _estimated_hashtable_bytes; // symbol table and dictionaries
279
280 static const int _total_dump_regions = 2;
281
282 size_t estimate_archive_size();
283
361
// Converts an archived u4 offset back into a typed pointer into the buffer.
// NOTE(review): relies on offset_to_buffered_address() declared above (not
// visible in this chunk); the C-style cast must stay valid for every T used.
362 template <typename T>
363 T offset_to_buffered(u4 offset) const {
364 return (T)offset_to_buffered_address(offset);
365 }
366
367 public:
368 ArchiveBuilder();
369 ~ArchiveBuilder();
370
// Presumably a deterministic pseudo-random value derived from _entropy_seed -- confirm in .cpp.
371 int entropy();
372 void gather_klasses_and_symbols();
373 void gather_source_objs();
374 bool gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only);
375 bool gather_one_source_obj(MetaspaceClosure::Ref* ref, bool read_only);
376 void remember_embedded_pointer_in_enclosing_obj(MetaspaceClosure::Ref* ref);
377 static void serialize_dynamic_archivable_items(SerializeClosure* soc);
378
379 DumpRegion* rw_region() { return &_rw_region; }
380 DumpRegion* ro_region() { return &_ro_region; }
381
// Convenience wrappers: allocate from the active builder's RW/RO region.
382 static char* rw_region_alloc(size_t num_bytes) {
383 return current()->rw_region()->allocate(num_bytes);
384 }
385 static char* ro_region_alloc(size_t num_bytes) {
386 return current()->ro_region()->allocate(num_bytes);
387 }
388
389 template <typename T>
390 static Array<T>* new_ro_array(int length) {
391 size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
392 Array<T>* array = (Array<T>*)ro_region_alloc(byte_size);
393 array->initialize(length);
394 return array;
395 }
396
397 template <typename T>
398 static Array<T>* new_rw_array(int length) {
399 size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
400 Array<T>* array = (Array<T>*)rw_region_alloc(byte_size);
401 array->initialize(length);
402 return array;
403 }
404
405 template <typename T>
406 static size_t ro_array_bytesize(int length) {
407 size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
408 return align_up(byte_size, SharedSpaceObjectAlignment);
409 }
410
// Duplicates a C string into the RO region.
411 char* ro_strdup(const char* s);
412
413 static int compare_src_objs(SourceObjInfo** a, SourceObjInfo** b);
414 void sort_metadata_objs();
415 void dump_rw_metadata();
416 void dump_ro_metadata();
417 void relocate_metaspaceobj_embedded_pointers();
// Records that regen_src_obj replaces orig_src_obj (see SourceObjInfo's regenerated-object constructor).
418 void record_regenerated_object(address orig_src_obj, address regen_src_obj);
419 void make_klasses_shareable();
420 void relocate_to_requested();
421 void write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info);
422 void write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region,
423 bool read_only, bool allow_exec);
424
// The template overloads below are thin typed wrappers: they cast to/from
// 'address' and delegate to the non-template versions declared beside them.
425 void write_pointer_in_buffer(address* ptr_location, address src_addr);
426 template <typename T> void write_pointer_in_buffer(T* ptr_location, T src_addr) {
427 write_pointer_in_buffer((address*)ptr_location, (address)src_addr);
428 }
429
430 void mark_and_relocate_to_buffered_addr(address* ptr_location);
431 template <typename T> void mark_and_relocate_to_buffered_addr(T ptr_location) {
432 mark_and_relocate_to_buffered_addr((address*)ptr_location);
433 }
434
435 bool has_been_buffered(address src_addr) const;
436 template <typename T> bool has_been_buffered(T src_addr) const {
437 return has_been_buffered((address)src_addr);
438 }
439
440 address get_buffered_addr(address src_addr) const;
441 template <typename T> T get_buffered_addr(T src_addr) const {
442 return (T)get_buffered_addr((address)src_addr);
443 }
444
445 address get_source_addr(address buffered_addr) const;
446 template <typename T> T get_source_addr(T buffered_addr) const {
447 return (T)get_source_addr((address)buffered_addr);
448 }
449
450 // All klasses and symbols that will be copied into the archive
451 GrowableArray<Klass*>* klasses() const { return _klasses; }
452 GrowableArray<Symbol*>* symbols() const { return _symbols; }
453
// True while a builder singleton exists (presumably set/cleared by the
// constructor/destructor -- confirm in .cpp).
454 static bool is_active() {
455 return (_current != nullptr);
456 }
457
// The active singleton; asserts if no dump is in progress.
458 static ArchiveBuilder* current() {
459 assert(_current != nullptr, "ArchiveBuilder must be active");
460 return _current;
461 }
462
463 static DumpAllocStats* alloc_stats() {
464 return &(current()->_alloc_stats);
465 }
466
467 static CompactHashtableStats* symbol_stats() {
468 return alloc_stats()->symbol_stats();
469 }
470
471 static CompactHashtableStats* string_stats() {
472 return alloc_stats()->string_stats();
473 }
474
475 narrowKlass get_requested_narrow_klass(Klass* k);
|
// ---- SourceObjInfo state (initialized by the constructors below) ----
131 bool _read_only; // destined for the read-only region? (presumably routes to _ro_src_objs -- confirm in .cpp)
132 bool _has_embedded_pointer; // set false at construction; presumably flipped when an embedded pointer is recorded
133 FollowMode _follow_mode; // how the object is treated: make_a_copy, point_to_it, ... (enum declared above)
134 int _size_in_bytes; // size in bytes (ref->size() is in words, scaled by BytesPerWord)
135 int _id; // Each object has a unique serial ID, starting from zero. The ID is assigned
136 // when the object is added into _source_objs.
137 MetaspaceObj::Type _msotype;
138 address _source_addr; // The source object to be copied.
139 address _buffered_addr; // The copy of this object inside the buffer.
140 public:
141 SourceObjInfo(MetaspaceClosure::Ref* ref, bool read_only, FollowMode follow_mode) :
142 _ptrmap_start(0), _ptrmap_end(0), _read_only(read_only), _has_embedded_pointer(false), _follow_mode(follow_mode),
143 _size_in_bytes(ref->size() * BytesPerWord), _id(0), _msotype(ref->msotype()),
144 _source_addr(ref->obj()) {
145 if (follow_mode == point_to_it) {
146 _buffered_addr = ref->obj();
147 } else {
148 _buffered_addr = nullptr;
149 }
150 }
151 SourceObjInfo(address src, address buf) {
152 _source_addr = src;
153 _buffered_addr = buf;
154 }
155
156 // This constructor is only used for regenerated objects (created by LambdaFormInvokers, etc).
157 // src = address of a Method or InstanceKlass that has been regenerated.
158 // renegerated_obj_info = info for the regenerated version of src.
159 SourceObjInfo(address src, SourceObjInfo* renegerated_obj_info) :
160 _ptrmap_start(0), _ptrmap_end(0), _read_only(false),
161 _follow_mode(renegerated_obj_info->_follow_mode),
162 _size_in_bytes(0), _msotype(renegerated_obj_info->_msotype),
163 _source_addr(src), _buffered_addr(renegerated_obj_info->_buffered_addr) {}
164
// True iff this object gets its own copy in the archive buffer
// (point_to_it objects are referenced in place instead).
165 bool should_copy() const { return _follow_mode == make_a_copy; }
// Records the address of the buffer copy. May be called exactly once,
// and only for objects in make_a_copy mode.
166 void set_buffered_addr(address addr) {
167 assert(should_copy(), "must be");
168 assert(_buffered_addr == nullptr, "cannot be copied twice");
169 assert(addr != nullptr, "must be a valid copy");
170 _buffered_addr = addr;
171 }
// Bit positions delimiting this object's range in the pointer bitmap.
172 void set_ptrmap_start(uintx v) { _ptrmap_start = v; }
173 void set_ptrmap_end(uintx v) { _ptrmap_end = v; }
174 uintx ptrmap_start() const { return _ptrmap_start; } // inclusive
// The collected objects, in insertion order.
201 GrowableArray<SourceObjInfo*>* objs() const { return _objs; }
202
// Adds one object to this list (defined in the .cpp file).
203 void append(SourceObjInfo* src_info);
// Records that pointing_obj contains an embedded pointer described by ref.
204 void remember_embedded_pointer(SourceObjInfo* pointing_obj, MetaspaceClosure::Ref* ref);
// Relocates the i-th object's embedded pointers via the builder.
205 void relocate(int i, ArchiveBuilder* builder);
206
207 // convenience accessor
208 SourceObjInfo* at(int i) const { return objs()->at(i); }
209 };
210
211 class CDSMapLogger;
212
// Sizing bounds for the resizable source-object hashtable below.
213 static const int INITIAL_TABLE_SIZE = 15889;
214 static const int MAX_TABLE_SIZE = 1000000;
215
// Reserved/committed memory -- presumably backs the dump buffer (confirm in .cpp).
216 ReservedSpace _shared_rs;
217 VirtualSpace _shared_vs;
218
219 DumpRegion _rw_region;
220 DumpRegion _ro_region;
221 DumpRegion _cc_region;
222
223 // Combined bitmap to track pointers in both RW and RO regions. This is updated
224 // as objects are copied into RW and RO.
225 CHeapBitMap _ptrmap;
226
227 // _ptrmap is split into these per-region bitmaps which are written into the archive.
228 CHeapBitMap _rw_ptrmap; // marks pointers in the RW region
229 CHeapBitMap _ro_ptrmap; // marks pointers in the RO region
230 CHeapBitMap _cc_ptrmap; // marks pointers in the CC region
231
232 SourceObjList _rw_src_objs; // objs to put in rw region
233 SourceObjList _ro_src_objs; // objs to put in ro region
// Forward map: source object address -> its SourceObjInfo record.
234 ResizeableResourceHashtable<address, SourceObjInfo, AnyObj::C_HEAP, mtClassShared> _src_obj_table;
// Reverse map: buffered copy address -> original source address.
235 ResizeableResourceHashtable<address, address, AnyObj::C_HEAP, mtClassShared> _buffered_to_src_table;
236 GrowableArray<Klass*>* _klasses;
237 GrowableArray<Symbol*>* _symbols;
// Seed presumably consumed by entropy() (declared below) -- confirm in .cpp.
238 unsigned int _entropy_seed;
239
240 // statistics
241 DumpAllocStats _alloc_stats;
242 size_t _total_heap_region_size;
243
244 void print_region_stats(FileMapInfo *map_info, ArchiveHeapInfo* heap_info);
245 void print_bitmap_region_stats(size_t size, size_t total_size);
246 void print_heap_region_stats(ArchiveHeapInfo* heap_info, size_t total_size);
247
248 // For global access.
249 static ArchiveBuilder* _current;
250
251 public:
252 // Use this when you allocate space outside of ArchiveBuilder::dump_{rw,ro}_region.
253 // These are usually for misc tables that are allocated in the RO space.
254 class OtherROAllocMark {
// Top of the RO region at construction time; the out-of-line destructor
// presumably accounts for everything allocated past this point -- see .cpp.
255 char* _oldtop;
256 public:
257 OtherROAllocMark() {
258 _oldtop = _current->_ro_region.top();
259 }
260 ~OtherROAllocMark();
261 };
262
263 private:
// Decides how a discovered object is treated (copied, pointed-to, etc).
264 FollowMode get_follow_mode(MetaspaceClosure::Ref *ref);
265
266 void iterate_sorted_roots(MetaspaceClosure* it);
267 void sort_klasses();
// qsort-style comparators used when ordering _symbols / _klasses.
268 static int compare_symbols_by_address(Symbol** a, Symbol** b);
269 static int compare_klass_by_name(Klass** a, Klass** b);
// No-op when CDS_JAVA_HEAP is not built (see NOT_CDS_JAVA_HEAP_RETURN).
270 void update_hidden_class_loader_type(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN;
271
// Shallow-copies the given source objects into the dump region; embedded
// pointers are presumably fixed up later by relocate_embedded_pointers().
272 void make_shallow_copies(DumpRegion *dump_region, const SourceObjList* src_objs);
273 void make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info);
274
275 void relocate_embedded_pointers(SourceObjList* src_objs);
276
277 bool is_excluded(Klass* k);
278 void clean_up_src_obj_table();
279
280 protected:
// Subclasses enumerate the root set of objects to be archived.
281 virtual void iterate_roots(MetaspaceClosure* it) = 0;
282
283 // Conservative estimate for number of bytes needed for:
284 size_t _estimated_metaspaceobj_bytes; // all archived MetaspaceObj's.
285 size_t _estimated_hashtable_bytes; // symbol table and dictionaries
286
// NOTE(review): still 2 even though a _cc_region now exists -- confirm whether
// the CC region is intentionally excluded from this count.
287 static const int _total_dump_regions = 2;
288
289 size_t estimate_archive_size();
290
368
// Converts an archived u4 offset back into a typed pointer into the buffer.
// NOTE(review): relies on offset_to_buffered_address() declared above (not
// visible in this chunk); the C-style cast must stay valid for every T used.
369 template <typename T>
370 T offset_to_buffered(u4 offset) const {
371 return (T)offset_to_buffered_address(offset);
372 }
373
374 public:
375 ArchiveBuilder();
376 ~ArchiveBuilder();
377
// Presumably a deterministic pseudo-random value derived from _entropy_seed -- confirm in .cpp.
378 int entropy();
379 void gather_klasses_and_symbols();
380 void gather_source_objs();
381 bool gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only);
382 bool gather_one_source_obj(MetaspaceClosure::Ref* ref, bool read_only);
383 void remember_embedded_pointer_in_enclosing_obj(MetaspaceClosure::Ref* ref);
384 static void serialize_dynamic_archivable_items(SerializeClosure* soc);
385
386 DumpRegion* rw_region() { return &_rw_region; }
387 DumpRegion* ro_region() { return &_ro_region; }
388 DumpRegion* cc_region() { return &_cc_region; }
389
// Bracket allocations that belong in the CC region (defined in the .cpp file).
390 void start_cc_region();
391 void end_cc_region();
392
// Convenience wrappers: allocate from the active builder's RW/RO/CC region.
393 static char* rw_region_alloc(size_t num_bytes) {
394 return current()->rw_region()->allocate(num_bytes);
395 }
396 static char* ro_region_alloc(size_t num_bytes) {
397 return current()->ro_region()->allocate(num_bytes);
398 }
399 static char* cc_region_alloc(size_t num_bytes) {
400 return current()->cc_region()->allocate(num_bytes);
401 }
402
403 template <typename T>
404 static Array<T>* new_ro_array(int length) {
405 size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
406 Array<T>* array = (Array<T>*)ro_region_alloc(byte_size);
407 array->initialize(length);
408 return array;
409 }
410
411 template <typename T>
412 static Array<T>* new_rw_array(int length) {
413 size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
414 Array<T>* array = (Array<T>*)rw_region_alloc(byte_size);
415 array->initialize(length);
416 return array;
417 }
418
419 template <typename T>
420 static size_t ro_array_bytesize(int length) {
421 size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
422 return align_up(byte_size, SharedSpaceObjectAlignment);
423 }
424
// Duplicates a C string into the RO region.
425 char* ro_strdup(const char* s);
426
427 static int compare_src_objs(SourceObjInfo** a, SourceObjInfo** b);
428 void sort_metadata_objs();
429 void dump_rw_metadata();
430 void dump_ro_metadata();
431 void relocate_metaspaceobj_embedded_pointers();
// Records that regen_src_obj replaces orig_src_obj (see SourceObjInfo's regenerated-object constructor).
432 void record_regenerated_object(address orig_src_obj, address regen_src_obj);
433 void make_klasses_shareable();
434 void make_training_data_shareable();
435 void relocate_to_requested();
436 void write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info);
437 void write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region,
438 bool read_only, bool allow_exec);
439
// The template overloads below are thin typed wrappers: they cast to/from
// 'address' and delegate to the non-template versions declared beside them.
440 void write_pointer_in_buffer(address* ptr_location, address src_addr);
441 template <typename T> void write_pointer_in_buffer(T* ptr_location, T src_addr) {
442 write_pointer_in_buffer((address*)ptr_location, (address)src_addr);
443 }
444
445 void mark_and_relocate_to_buffered_addr(address* ptr_location);
446 template <typename T> void mark_and_relocate_to_buffered_addr(T ptr_location) {
447 mark_and_relocate_to_buffered_addr((address*)ptr_location);
448 }
449
450 bool has_been_archived(address src_addr) const;
451
452 bool has_been_buffered(address src_addr) const;
453 template <typename T> bool has_been_buffered(T src_addr) const {
454 return has_been_buffered((address)src_addr);
455 }
456 address get_buffered_addr(address src_addr) const;
// NOTE(review): the NOT_CDS fallback returns nullptr, which assumes T is
// always a pointer type -- confirm no scalar instantiations exist.
457 template <typename T> T get_buffered_addr(T src_addr) const {
458 CDS_ONLY(return (T)get_buffered_addr((address)src_addr);)
459 NOT_CDS(return nullptr;)
460 }
461
462 address get_source_addr(address buffered_addr) const;
463 template <typename T> T get_source_addr(T buffered_addr) const {
464 return (T)get_source_addr((address)buffered_addr);
465 }
466
467 // All klasses and symbols that will be copied into the archive
468 GrowableArray<Klass*>* klasses() const { return _klasses; }
469 GrowableArray<Symbol*>* symbols() const { return _symbols; }
470
// True while a builder singleton exists; always false in non-CDS builds.
// NOTE(review): the ';' sits outside CDS_ONLY here but inside NOT_CDS below --
// both expand harmlessly, but consider normalizing for consistency.
471 static bool is_active() {
472 CDS_ONLY(return (_current != nullptr));
473 NOT_CDS(return false;)
474 }
475
// The active singleton; asserts if no dump is in progress.
476 static ArchiveBuilder* current() {
477 assert(_current != nullptr, "ArchiveBuilder must be active");
478 return _current;
479 }
480
481 static DumpAllocStats* alloc_stats() {
482 return &(current()->_alloc_stats);
483 }
484
485 static CompactHashtableStats* symbol_stats() {
486 return alloc_stats()->symbol_stats();
487 }
488
489 static CompactHashtableStats* string_stats() {
490 return alloc_stats()->string_stats();
491 }
492
493 narrowKlass get_requested_narrow_klass(Klass* k);
|