
src/hotspot/share/cds/archiveBuilder.hpp


128     bool _read_only;
129     bool _has_embedded_pointer;
130     FollowMode _follow_mode;
131     int _size_in_bytes;
132     int _id; // Each object has a unique serial ID, starting from zero. The ID is assigned
133              // when the object is added into _source_objs.
134     MetaspaceObj::Type _msotype;
135     address _source_addr;    // The source object to be copied.
136     address _buffered_addr;  // The copy of this object inside the buffer.
137   public:
138     SourceObjInfo(MetaspaceClosure::Ref* ref, bool read_only, FollowMode follow_mode) :
139       _ptrmap_start(0), _ptrmap_end(0), _read_only(read_only), _has_embedded_pointer(false), _follow_mode(follow_mode),
140       _size_in_bytes(ref->size() * BytesPerWord), _id(0), _msotype(ref->msotype()),
141       _source_addr(ref->obj()) {
142       if (follow_mode == point_to_it) {
143         _buffered_addr = ref->obj();
144       } else {
145         _buffered_addr = nullptr;
146       }
147     }




148 
149     // This constructor is only used for regenerated objects (created by LambdaFormInvokers, etc).
150     //   src = address of a Method or InstanceKlass that has been regenerated.
151     //   regenerated_obj_info = info for the regenerated version of src.
152     SourceObjInfo(address src, SourceObjInfo* regenerated_obj_info) :
153       _ptrmap_start(0), _ptrmap_end(0), _read_only(false),
154       _follow_mode(regenerated_obj_info->_follow_mode),
155       _size_in_bytes(0), _msotype(regenerated_obj_info->_msotype),
156       _source_addr(src),  _buffered_addr(regenerated_obj_info->_buffered_addr) {}
157 
158     bool should_copy() const { return _follow_mode == make_a_copy; }
159     void set_buffered_addr(address addr)  {
160       assert(should_copy(), "must be");
161       assert(_buffered_addr == nullptr, "cannot be copied twice");
162       assert(addr != nullptr, "must be a valid copy");
163       _buffered_addr = addr;
164     }
165     void set_ptrmap_start(uintx v) { _ptrmap_start = v;    }
166     void set_ptrmap_end(uintx v)   { _ptrmap_end = v;      }
167     uintx ptrmap_start()  const    { return _ptrmap_start; } // inclusive

200     // convenience accessor
201     SourceObjInfo* at(int i) const { return objs()->at(i); }
202   };
203 
204   class CDSMapLogger;
205 
206   static const int INITIAL_TABLE_SIZE = 15889;
207   static const int MAX_TABLE_SIZE     = 1000000;
208 
209   ReservedSpace _shared_rs;
210   VirtualSpace _shared_vs;
211 
212   // The "pz" region is used only during static dumps to reserve unused space between SharedBaseAddress and
213   // the bottom of the rw region. At runtime, this space is covered by a reserved area that disallows
214   // read/write/exec access, so we can catch bad CompressedKlassPointers encodings.
215   // Note: this region does NOT exist in the CDS archive.
216   DumpRegion _pz_region;
217 
218   DumpRegion _rw_region;
219   DumpRegion _ro_region;

220 
221   // Combined bitmap to track pointers in both RW and RO regions. This is updated
222   // as objects are copied into RW and RO.
223   CHeapBitMap _ptrmap;
224 
225   // _ptrmap is split into these two bitmaps which are written into the archive.
226   CHeapBitMap _rw_ptrmap;   // marks pointers in the RW region
227   CHeapBitMap _ro_ptrmap;   // marks pointers in the RO region

228 
229   SourceObjList _rw_src_objs;                 // objs to put in rw region
230   SourceObjList _ro_src_objs;                 // objs to put in ro region
231   ResizeableResourceHashtable<address, SourceObjInfo, AnyObj::C_HEAP, mtClassShared> _src_obj_table;
232   ResizeableResourceHashtable<address, address, AnyObj::C_HEAP, mtClassShared> _buffered_to_src_table;
233   GrowableArray<Klass*>* _klasses;
234   GrowableArray<Symbol*>* _symbols;
235   unsigned int _entropy_seed;
236 
237   // statistics
238   DumpAllocStats _alloc_stats;
239   size_t _total_heap_region_size;





240 
241   void print_region_stats(FileMapInfo *map_info, ArchiveHeapInfo* heap_info);
242   void print_bitmap_region_stats(size_t size, size_t total_size);
243   void print_heap_region_stats(ArchiveHeapInfo* heap_info, size_t total_size);
244 
245   // For global access.
246   static ArchiveBuilder* _current;
247 
248 public:
249   // Use this when you allocate space outside of ArchiveBuilder::dump_{rw,ro}_region.
250   // These are usually for misc tables that are allocated in the RO space.
251   class OtherROAllocMark {
252     char* _oldtop;
253   public:
254     OtherROAllocMark() {
255       _oldtop = _current->_ro_region.top();
256     }
257     ~OtherROAllocMark();
258   };
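
A usage sketch for OtherROAllocMark (the caller and its table are hypothetical; only ro_region_alloc() and the mark itself come from this header). The mark records _ro_region.top() on entry, so the destructor can account for whatever was allocated outside of dump_ro_region():

    // Hypothetical caller that places a misc table into the RO region outside of
    // ArchiveBuilder::dump_ro_region().
    void write_some_misc_table() {
      ArchiveBuilder::OtherROAllocMark mark;              // remembers current RO top
      char* buf = ArchiveBuilder::ro_region_alloc(1024);  // raw RO-space allocation
      // ... fill in the table at 'buf' ...
    }                                                     // ~OtherROAllocMark() runs here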
259 


260 private:
261   FollowMode get_follow_mode(MetaspaceClosure::Ref *ref);
262 
263   void iterate_sorted_roots(MetaspaceClosure* it);
264   void sort_klasses();
265   static int compare_symbols_by_address(Symbol** a, Symbol** b);
266   static int compare_klass_by_name(Klass** a, Klass** b);

267 
268   void make_shallow_copies(DumpRegion *dump_region, const SourceObjList* src_objs);
269   void make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info);
270 
271   void relocate_embedded_pointers(SourceObjList* src_objs);
272 
273   bool is_excluded(Klass* k);
274   void clean_up_src_obj_table();
275 
276 protected:
277   virtual void iterate_roots(MetaspaceClosure* it) = 0;
278   void start_dump_region(DumpRegion* next);
279 
280 public:
281   address reserve_buffer();
282 
283   address buffer_bottom()                    const { return _buffer_bottom;                        }
284   address buffer_top()                       const { return (address)current_dump_region()->top(); }
285   address requested_static_archive_bottom()  const { return  _requested_static_archive_bottom;     }
286   address mapped_static_archive_bottom()     const { return  _mapped_static_archive_bottom;        }

355   template <typename T>
356   T offset_to_buffered(u4 offset) const {
357     return (T)offset_to_buffered_address(offset);
358   }
359 
360 public:
361   ArchiveBuilder();
362   ~ArchiveBuilder();
363 
364   int entropy();
365   void gather_klasses_and_symbols();
366   void gather_source_objs();
367   bool gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only);
368   bool gather_one_source_obj(MetaspaceClosure::Ref* ref, bool read_only);
369   void remember_embedded_pointer_in_enclosing_obj(MetaspaceClosure::Ref* ref);
370   static void serialize_dynamic_archivable_items(SerializeClosure* soc);
371 
372   DumpRegion* pz_region() { return &_pz_region; }
373   DumpRegion* rw_region() { return &_rw_region; }
374   DumpRegion* ro_region() { return &_ro_region; }




375 
376   static char* rw_region_alloc(size_t num_bytes) {
377     return current()->rw_region()->allocate(num_bytes);
378   }
379   static char* ro_region_alloc(size_t num_bytes) {
380     return current()->ro_region()->allocate(num_bytes);
381   }



382 
383   template <typename T>
384   static Array<T>* new_ro_array(int length) {
385     size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
386     Array<T>* array = (Array<T>*)ro_region_alloc(byte_size);
387     array->initialize(length);
388     return array;
389   }
390 
391   template <typename T>
392   static Array<T>* new_rw_array(int length) {
393     size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
394     Array<T>* array = (Array<T>*)rw_region_alloc(byte_size);
395     array->initialize(length);
396     return array;
397   }
398 
399   template <typename T>
400   static size_t ro_array_bytesize(int length) {
401     size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
402     return align_up(byte_size, SharedSpaceObjectAlignment);
403   }
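
As a short usage sketch (element type and values are made up for illustration), new_ro_array<T> combines the RO-region allocation above with Array<T>::initialize, so callers only need to fill in the elements:

    // Allocate a length-3 Array<u4> in the RO region and populate it.
    Array<u4>* offsets = ArchiveBuilder::new_ro_array<u4>(3);
    for (int i = 0; i < offsets->length(); i++) {
      offsets->at_put(i, 0);   // store element i
    }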
404 
405   char* ro_strdup(const char* s);
406 
407   static int compare_src_objs(SourceObjInfo** a, SourceObjInfo** b);
408   void sort_metadata_objs();
409   void dump_rw_metadata();
410   void dump_ro_metadata();
411   void relocate_metaspaceobj_embedded_pointers();
412   void record_regenerated_object(address orig_src_obj, address regen_src_obj);
413   void make_klasses_shareable();

414   void relocate_to_requested();
415   void write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info);
416   void write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region,
417                     bool read_only,  bool allow_exec);
418 
419   void write_pointer_in_buffer(address* ptr_location, address src_addr);
420   template <typename T> void write_pointer_in_buffer(T* ptr_location, T src_addr) {
421     write_pointer_in_buffer((address*)ptr_location, (address)src_addr);
422   }
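
For example (the buffered object and its field are hypothetical), write_pointer_in_buffer() stores a source-space pointer into a slot of an object that has already been copied into the dump buffer; the typed overload simply casts down to the (address*, address) form:

    // 'buffered_holder' is a copy inside the dump buffer; 'src_method' is a
    // source-space Method*. Field name is hypothetical.
    ArchiveBuilder::current()->write_pointer_in_buffer(
        &buffered_holder->_method_field, src_method);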
423 
424   void mark_and_relocate_to_buffered_addr(address* ptr_location);
425   template <typename T> void mark_and_relocate_to_buffered_addr(T ptr_location) {
426     mark_and_relocate_to_buffered_addr((address*)ptr_location);
427   }
428 


429   bool has_been_buffered(address src_addr) const;
430   template <typename T> bool has_been_buffered(T src_addr) const {
431     return has_been_buffered((address)src_addr);
432   }
433 
434   address get_buffered_addr(address src_addr) const;
435   template <typename T> T get_buffered_addr(T src_addr) const {
436     return (T)get_buffered_addr((address)src_addr);

437   }
438 
439   address get_source_addr(address buffered_addr) const;
440   template <typename T> T get_source_addr(T buffered_addr) const {
441     return (T)get_source_addr((address)buffered_addr);
442   }
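
A hedged round-trip example of the source/buffered mapping (the caller and the klass argument are hypothetical):

    // Given a source-space InstanceKlass gathered earlier, look up its copy in the
    // dump buffer and check the reverse mapping.
    void example_lookup(InstanceKlass* ik) {
      ArchiveBuilder* builder = ArchiveBuilder::current();
      if (builder->has_been_buffered(ik)) {
        InstanceKlass* copy = builder->get_buffered_addr(ik);             // source -> buffered
        assert(builder->get_source_addr(copy) == ik, "reverse mapping");  // buffered -> source
      }
    }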
443 
444   // All klasses and symbols that will be copied into the archive
445   GrowableArray<Klass*>*  klasses() const { return _klasses; }
446   GrowableArray<Symbol*>* symbols() const { return _symbols; }
447 
448   static bool is_active() {
449     return (_current != nullptr);

450   }
451 
452   static ArchiveBuilder* current() {
453     assert(_current != nullptr, "ArchiveBuilder must be active");
454     return _current;
455   }
456 
457   static DumpAllocStats* alloc_stats() {
458     return &(current()->_alloc_stats);
459   }
460 
461   static CompactHashtableStats* symbol_stats() {
462     return alloc_stats()->symbol_stats();
463   }
464 
465   static CompactHashtableStats* string_stats() {
466     return alloc_stats()->string_stats();
467   }
468 
469   narrowKlass get_requested_narrow_klass(Klass* k);

128     bool _read_only;
129     bool _has_embedded_pointer;
130     FollowMode _follow_mode;
131     int _size_in_bytes;
132     int _id; // Each object has a unique serial ID, starting from zero. The ID is assigned
133              // when the object is added into _source_objs.
134     MetaspaceObj::Type _msotype;
135     address _source_addr;    // The source object to be copied.
136     address _buffered_addr;  // The copy of this object inside the buffer.
137   public:
138     SourceObjInfo(MetaspaceClosure::Ref* ref, bool read_only, FollowMode follow_mode) :
139       _ptrmap_start(0), _ptrmap_end(0), _read_only(read_only), _has_embedded_pointer(false), _follow_mode(follow_mode),
140       _size_in_bytes(ref->size() * BytesPerWord), _id(0), _msotype(ref->msotype()),
141       _source_addr(ref->obj()) {
142       if (follow_mode == point_to_it) {
143         _buffered_addr = ref->obj();
144       } else {
145         _buffered_addr = nullptr;
146       }
147     }
148     SourceObjInfo(address src, address buf) {
149       _source_addr = src;
150       _buffered_addr = buf;
151     }
152 
153     // This constructor is only used for regenerated objects (created by LambdaFormInvokers, etc).
154     //   src = address of a Method or InstanceKlass that has been regenerated.
155     //   regenerated_obj_info = info for the regenerated version of src.
156     SourceObjInfo(address src, SourceObjInfo* regenerated_obj_info) :
157       _ptrmap_start(0), _ptrmap_end(0), _read_only(false),
158       _follow_mode(regenerated_obj_info->_follow_mode),
159       _size_in_bytes(0), _msotype(regenerated_obj_info->_msotype),
160       _source_addr(src),  _buffered_addr(regenerated_obj_info->_buffered_addr) {}
161 
162     bool should_copy() const { return _follow_mode == make_a_copy; }
163     void set_buffered_addr(address addr)  {
164       assert(should_copy(), "must be");
165       assert(_buffered_addr == nullptr, "cannot be copied twice");
166       assert(addr != nullptr, "must be a valid copy");
167       _buffered_addr = addr;
168     }
169     void set_ptrmap_start(uintx v) { _ptrmap_start = v;    }
170     void set_ptrmap_end(uintx v)   { _ptrmap_end = v;      }
171     uintx ptrmap_start()  const    { return _ptrmap_start; } // inclusive

204     // convenience accessor
205     SourceObjInfo* at(int i) const { return objs()->at(i); }
206   };
207 
208   class CDSMapLogger;
209 
210   static const int INITIAL_TABLE_SIZE = 15889;
211   static const int MAX_TABLE_SIZE     = 1000000;
212 
213   ReservedSpace _shared_rs;
214   VirtualSpace _shared_vs;
215 
216   // The "pz" region is used only during static dumps to reserve unused space between SharedBaseAddress and
217   // the bottom of the rw region. At runtime, this space is covered by a reserved area that disallows
218   // read/write/exec access, so we can catch bad CompressedKlassPointers encodings.
219   // Note: this region does NOT exist in the CDS archive.
220   DumpRegion _pz_region;
221 
222   DumpRegion _rw_region;
223   DumpRegion _ro_region;
224   DumpRegion _cc_region;
225 
226   // Combined bitmap to track pointers in the RW, RO, and CC regions. This is updated
227   // as objects are copied into these regions.
228   CHeapBitMap _ptrmap;
229 
230   // _ptrmap is split into the following bitmaps, which are written into the archive.
231   CHeapBitMap _rw_ptrmap;   // marks pointers in the RW region
232   CHeapBitMap _ro_ptrmap;   // marks pointers in the RO region
233   CHeapBitMap _cc_ptrmap;   // marks pointers in the CC region
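
To illustrate the idea behind these bitmaps (a minimal conceptual sketch, not the CHeapBitMap API): each bit corresponds to one pointer-sized slot of the dump buffer, and a set bit means that slot holds a pointer that must be adjusted when the archive is mapped at a different address.

    // Conceptual sketch only: bit i covers the slot at buffer_bottom + i * sizeof(address).
    inline size_t ptr_slot_index(address buffer_bottom, address* slot_in_buffer) {
      return (size_t)((address)slot_in_buffer - buffer_bottom) / sizeof(address);
    }
    // Marking a slot:      bitmap.set_bit(ptr_slot_index(bottom, slot));
    // Relocation at write: for each set bit, adjust the marked slot by the
    //                      requested-vs-buffered base delta.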
234 
235   SourceObjList _rw_src_objs;                 // objs to put in rw region
236   SourceObjList _ro_src_objs;                 // objs to put in ro region
237   ResizeableResourceHashtable<address, SourceObjInfo, AnyObj::C_HEAP, mtClassShared> _src_obj_table;
238   ResizeableResourceHashtable<address, address, AnyObj::C_HEAP, mtClassShared> _buffered_to_src_table;
239   GrowableArray<Klass*>* _klasses;
240   GrowableArray<Symbol*>* _symbols;
241   unsigned int _entropy_seed;
242 
243   // statistics
244   DumpAllocStats _alloc_stats;
245   size_t _total_heap_region_size;
246   struct {
247     size_t _num_ptrs;
248     size_t _num_tagged_ptrs;
249     size_t _num_nulled_ptrs;
250   } _relocated_ptr_info;
251 
252   void print_region_stats(FileMapInfo *map_info, ArchiveHeapInfo* heap_info);
253   void print_bitmap_region_stats(size_t size, size_t total_size);
254   void print_heap_region_stats(ArchiveHeapInfo* heap_info, size_t total_size);
255 
256   // For global access.
257   static ArchiveBuilder* _current;
258 
259 public:
260   // Use this when you allocate space outside of ArchiveBuilder::dump_{rw,ro}_region.
261   // These are usually for misc tables that are allocated in the RO space.
262   class OtherROAllocMark {
263     char* _oldtop;
264   public:
265     OtherROAllocMark() {
266       _oldtop = _current->_ro_region.top();
267     }
268     ~OtherROAllocMark();
269   };
270 
271   void count_relocated_pointer(bool tagged, bool nulled);
272 
273 private:
274   FollowMode get_follow_mode(MetaspaceClosure::Ref *ref);
275 
276   void iterate_sorted_roots(MetaspaceClosure* it);
277   void sort_klasses();
278   static int compare_symbols_by_address(Symbol** a, Symbol** b);
279   static int compare_klass_by_name(Klass** a, Klass** b);
280   void update_hidden_class_loader_type(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN;
281 
282   void make_shallow_copies(DumpRegion *dump_region, const SourceObjList* src_objs);
283   void make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info);
284 
285   void relocate_embedded_pointers(SourceObjList* src_objs);
286 
287   bool is_excluded(Klass* k);
288   void clean_up_src_obj_table();
289 
290 protected:
291   virtual void iterate_roots(MetaspaceClosure* it) = 0;
292   void start_dump_region(DumpRegion* next);
293 
294 public:
295   address reserve_buffer();
296 
297   address buffer_bottom()                    const { return _buffer_bottom;                        }
298   address buffer_top()                       const { return (address)current_dump_region()->top(); }
299   address requested_static_archive_bottom()  const { return  _requested_static_archive_bottom;     }
300   address mapped_static_archive_bottom()     const { return  _mapped_static_archive_bottom;        }

369   template <typename T>
370   T offset_to_buffered(u4 offset) const {
371     return (T)offset_to_buffered_address(offset);
372   }
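
As a usage note (the offset variable is hypothetical), offset_to_buffered() turns a u4 offset that was stored in an archived record back into a typed pointer within the dump buffer:

    // 'sym_offset' is assumed to have been computed earlier from a buffered address;
    // the corresponding to-offset helpers are not shown in this excerpt.
    u4 sym_offset = 0;  // placeholder value for illustration
    Symbol* sym = ArchiveBuilder::current()->offset_to_buffered<Symbol*>(sym_offset);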
373 
374 public:
375   ArchiveBuilder();
376   ~ArchiveBuilder();
377 
378   int entropy();
379   void gather_klasses_and_symbols();
380   void gather_source_objs();
381   bool gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only);
382   bool gather_one_source_obj(MetaspaceClosure::Ref* ref, bool read_only);
383   void remember_embedded_pointer_in_enclosing_obj(MetaspaceClosure::Ref* ref);
384   static void serialize_dynamic_archivable_items(SerializeClosure* soc);
385 
386   DumpRegion* pz_region() { return &_pz_region; }
387   DumpRegion* rw_region() { return &_rw_region; }
388   DumpRegion* ro_region() { return &_ro_region; }
389   DumpRegion* cc_region() { return &_cc_region; }
390 
391   void start_cc_region();
392   void end_cc_region();
393 
394   static char* rw_region_alloc(size_t num_bytes) {
395     return current()->rw_region()->allocate(num_bytes);
396   }
397   static char* ro_region_alloc(size_t num_bytes) {
398     return current()->ro_region()->allocate(num_bytes);
399   }
400   static char* cc_region_alloc(size_t num_bytes) {
401     return current()->cc_region()->allocate(num_bytes);
402   }
403 
404   template <typename T>
405   static Array<T>* new_ro_array(int length) {
406     size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
407     Array<T>* array = (Array<T>*)ro_region_alloc(byte_size);
408     array->initialize(length);
409     return array;
410   }
411 
412   template <typename T>
413   static Array<T>* new_rw_array(int length) {
414     size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
415     Array<T>* array = (Array<T>*)rw_region_alloc(byte_size);
416     array->initialize(length);
417     return array;
418   }
419 
420   template <typename T>
421   static size_t ro_array_bytesize(int length) {
422     size_t byte_size = Array<T>::byte_sizeof(length, sizeof(T));
423     return align_up(byte_size, SharedSpaceObjectAlignment);
424   }
425 
426   char* ro_strdup(const char* s);
427 
428   static int compare_src_objs(SourceObjInfo** a, SourceObjInfo** b);
429   void sort_metadata_objs();
430   void dump_rw_metadata();
431   void dump_ro_metadata();
432   void relocate_metaspaceobj_embedded_pointers();
433   void record_regenerated_object(address orig_src_obj, address regen_src_obj);
434   void make_klasses_shareable();
435   void make_training_data_shareable();
436   void relocate_to_requested();
437   void write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info);
438   void write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region,
439                     bool read_only,  bool allow_exec);
440 
441   void write_pointer_in_buffer(address* ptr_location, address src_addr);
442   template <typename T> void write_pointer_in_buffer(T* ptr_location, T src_addr) {
443     write_pointer_in_buffer((address*)ptr_location, (address)src_addr);
444   }
445 
446   void mark_and_relocate_to_buffered_addr(address* ptr_location);
447   template <typename T> void mark_and_relocate_to_buffered_addr(T ptr_location) {
448     mark_and_relocate_to_buffered_addr((address*)ptr_location);
449   }
450 
451   bool has_been_archived(address src_addr) const;
452 
453   bool has_been_buffered(address src_addr) const;
454   template <typename T> bool has_been_buffered(T src_addr) const {
455     return has_been_buffered((address)src_addr);
456   }
457 
458   address get_buffered_addr(address src_addr) const;
459   template <typename T> T get_buffered_addr(T src_addr) const {
460     CDS_ONLY(return (T)get_buffered_addr((address)src_addr);)
461     NOT_CDS(return nullptr;)
462   }
463 
464   address get_source_addr(address buffered_addr) const;
465   template <typename T> T get_source_addr(T buffered_addr) const {
466     return (T)get_source_addr((address)buffered_addr);
467   }
468 
469   // All klasses and symbols that will be copied into the archive
470   GrowableArray<Klass*>*  klasses() const { return _klasses; }
471   GrowableArray<Symbol*>* symbols() const { return _symbols; }
472 
473   static bool is_active() {
474     CDS_ONLY(return (_current != nullptr));
475     NOT_CDS(return false;)
476   }
477 
478   static ArchiveBuilder* current() {
479     assert(_current != nullptr, "ArchiveBuilder must be active");
480     return _current;
481   }
482 
483   static DumpAllocStats* alloc_stats() {
484     return &(current()->_alloc_stats);
485   }
486 
487   static CompactHashtableStats* symbol_stats() {
488     return alloc_stats()->symbol_stats();
489   }
490 
491   static CompactHashtableStats* string_stats() {
492     return alloc_stats()->string_stats();
493   }
494 
495   narrowKlass get_requested_narrow_klass(Klass* k);