
src/hotspot/share/cds/archiveBuilder.hpp


128     bool _read_only;
129     bool _has_embedded_pointer;
130     FollowMode _follow_mode;
131     int _size_in_bytes;
132     int _id; // Each object has a unique serial ID, starting from zero. The ID is assigned
133              // when the object is added into _source_objs.
134     MetaspaceObj::Type _msotype;
135     address _source_addr;    // The source object to be copied.
136     address _buffered_addr;  // The copy of this object inside the buffer.
137   public:
138     SourceObjInfo(MetaspaceClosure::Ref* ref, bool read_only, FollowMode follow_mode) :
139       _ptrmap_start(0), _ptrmap_end(0), _read_only(read_only), _has_embedded_pointer(false), _follow_mode(follow_mode),
140       _size_in_bytes(ref->size() * BytesPerWord), _id(0), _msotype(ref->msotype()),
141       _source_addr(ref->obj()) {
142       if (follow_mode == point_to_it) {
143         _buffered_addr = ref->obj();
144       } else {
145         _buffered_addr = nullptr;
146       }
147     }




148 
149     // This constructor is only used for regenerated objects (created by LambdaFormInvokers, etc).
150     //   src = address of a Method or InstanceKlass that has been regenerated.
151     //   regenerated_obj_info = info for the regenerated version of src.
152     SourceObjInfo(address src, SourceObjInfo* regenerated_obj_info) :
153       _ptrmap_start(0), _ptrmap_end(0), _read_only(false),
154       _follow_mode(regenerated_obj_info->_follow_mode),
155       _size_in_bytes(0), _msotype(regenerated_obj_info->_msotype),
156       _source_addr(src), _buffered_addr(regenerated_obj_info->_buffered_addr) {}
157 
158     bool should_copy() const { return _follow_mode == make_a_copy; }
159     void set_buffered_addr(address addr)  {
160       assert(should_copy(), "must be");
161       assert(_buffered_addr == nullptr, "cannot be copied twice");
162       assert(addr != nullptr, "must be a valid copy");
163       _buffered_addr = addr;
164     }
165     void set_ptrmap_start(uintx v) { _ptrmap_start = v;    }
166     void set_ptrmap_end(uintx v)   { _ptrmap_end = v;      }
167     uintx ptrmap_start()  const    { return _ptrmap_start; } // inclusive

209   ReservedSpace _shared_rs;
210   VirtualSpace _shared_vs;
211 
212   // The "pz" region is used only during static dumps to reserve an unused gap between SharedBaseAddress and
213   // the bottom of the rw region. At runtime, this gap is covered by a reserved area that disallows
214   // read/write/exec, so bad CompressedKlassPointers encodings are caught immediately.
215   // Note: this region does NOT exist in the CDS archive.
216   DumpRegion _pz_region;
217 
218   DumpRegion _rw_region;
219   DumpRegion _ro_region;
220   DumpRegion _ac_region; // AOT code
221 
222   // Combined bitmap to track pointers in both RW and RO regions. This is updated
223   // as objects are copied into RW and RO.
224   CHeapBitMap _ptrmap;
225 
226   // _ptrmap is split into these two bitmaps which are written into the archive.
227   CHeapBitMap _rw_ptrmap;   // marks pointers in the RW region
228   CHeapBitMap _ro_ptrmap;   // marks pointers in the RO region

229 
230   SourceObjList _rw_src_objs;                 // objs to put in rw region
231   SourceObjList _ro_src_objs;                 // objs to put in ro region
232   ResizeableResourceHashtable<address, SourceObjInfo, AnyObj::C_HEAP, mtClassShared> _src_obj_table;
233   ResizeableResourceHashtable<address, address, AnyObj::C_HEAP, mtClassShared> _buffered_to_src_table;
234   GrowableArray<Klass*>* _klasses;
235   GrowableArray<Symbol*>* _symbols;
236   unsigned int _entropy_seed;
237 
238   // statistics
239   DumpAllocStats _alloc_stats;
240   size_t _total_heap_region_size;
241   struct {
242     size_t _num_ptrs;
243     size_t _num_tagged_ptrs;
244     size_t _num_nulled_ptrs;
245   } _relocated_ptr_info;
246 
247   void print_region_stats(FileMapInfo *map_info, ArchiveHeapInfo* heap_info);
248   void print_bitmap_region_stats(size_t size, size_t total_size);

255   // Use this when you allocate space outside of ArchiveBuilder::dump_{rw,ro}_region.
256   // These are usually for misc tables that are allocated in the RO space.
257   class OtherROAllocMark {
258     char* _oldtop;
259   public:
260     OtherROAllocMark() {
261       _oldtop = _current->_ro_region.top();
262     }
263     ~OtherROAllocMark();
264   };
265 
266   void count_relocated_pointer(bool tagged, bool nulled);
267 
268 private:
269   FollowMode get_follow_mode(MetaspaceClosure::Ref *ref);
270 
271   void iterate_sorted_roots(MetaspaceClosure* it);
272   void sort_klasses();
273   static int compare_symbols_by_address(Symbol** a, Symbol** b);
274   static int compare_klass_by_name(Klass** a, Klass** b);

275 
276   void make_shallow_copies(DumpRegion *dump_region, const SourceObjList* src_objs);
277   void make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info);
278 
279   void relocate_embedded_pointers(SourceObjList* src_objs);
280 
281   bool is_excluded(Klass* k);
282   void clean_up_src_obj_table();
283 
284 protected:
285   virtual void iterate_roots(MetaspaceClosure* it) = 0;
286   void start_dump_region(DumpRegion* next);
287 
288 public:
289   address reserve_buffer();
290 
291   address buffer_bottom()                    const { return _buffer_bottom;                        }
292   address buffer_top()                       const { return (address)current_dump_region()->top(); }
293   address requested_static_archive_bottom()  const { return  _requested_static_archive_bottom;     }
294   address mapped_static_archive_bottom()     const { return  _mapped_static_archive_bottom;        }

128     bool _read_only;
129     bool _has_embedded_pointer;
130     FollowMode _follow_mode;
131     int _size_in_bytes;
132     int _id; // Each object has a unique serial ID, starting from zero. The ID is assigned
133              // when the object is added into _source_objs.
134     MetaspaceObj::Type _msotype;
135     address _source_addr;    // The source object to be copied.
136     address _buffered_addr;  // The copy of this object inside the buffer.
137   public:
138     SourceObjInfo(MetaspaceClosure::Ref* ref, bool read_only, FollowMode follow_mode) :
139       _ptrmap_start(0), _ptrmap_end(0), _read_only(read_only), _has_embedded_pointer(false), _follow_mode(follow_mode),
140       _size_in_bytes(ref->size() * BytesPerWord), _id(0), _msotype(ref->msotype()),
141       _source_addr(ref->obj()) {
142       if (follow_mode == point_to_it) {
143         _buffered_addr = ref->obj();
144       } else {
145         _buffered_addr = nullptr;
146       }
147     }
148     SourceObjInfo(address src, address buf) {
149       _source_addr = src;
150       _buffered_addr = buf;
151     }
152 
153     // This constructor is only used for regenerated objects (created by LambdaFormInvokers, etc).
154     //   src = address of a Method or InstanceKlass that has been regenerated.
155     //   regenerated_obj_info = info for the regenerated version of src.
156     SourceObjInfo(address src, SourceObjInfo* regenerated_obj_info) :
157       _ptrmap_start(0), _ptrmap_end(0), _read_only(false),
158       _follow_mode(regenerated_obj_info->_follow_mode),
159       _size_in_bytes(0), _msotype(regenerated_obj_info->_msotype),
160       _source_addr(src), _buffered_addr(regenerated_obj_info->_buffered_addr) {}
161 
162     bool should_copy() const { return _follow_mode == make_a_copy; }
163     void set_buffered_addr(address addr)  {
164       assert(should_copy(), "must be");
165       assert(_buffered_addr == nullptr, "cannot be copied twice");
166       assert(addr != nullptr, "must be a valid copy");
167       _buffered_addr = addr;
168     }
169     void set_ptrmap_start(uintx v) { _ptrmap_start = v;    }
170     void set_ptrmap_end(uintx v)   { _ptrmap_end = v;      }
171     uintx ptrmap_start()  const    { return _ptrmap_start; } // inclusive
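The SourceObjInfo excerpt above encodes a two-phase protocol for _buffered_addr: point_to_it objects are never copied, so they buffer to their own source address immediately, while make_a_copy objects get a buffered address only after the shallow copy has been placed in a dump region (via set_buffered_addr, which asserts this happens exactly once). Below is a minimal, self-contained sketch of that protocol; the 'address' typedef, the stripped-down class, and the driver in main() are stand-ins, not HotSpot code.

// Simplified model of the two-phase _buffered_addr protocol described above.
#include <cassert>
#include <cstdio>

typedef unsigned char* address;

enum FollowMode { make_a_copy, point_to_it, set_to_null };

class SourceObjInfo {
  FollowMode _follow_mode;
  address    _source_addr;
  address    _buffered_addr;
public:
  SourceObjInfo(address src, FollowMode mode)
    : _follow_mode(mode), _source_addr(src),
      // point_to_it objects are never copied, so they "buffer" to themselves
      _buffered_addr(mode == point_to_it ? src : nullptr) {}

  bool should_copy() const { return _follow_mode == make_a_copy; }

  // Called once, after the shallow copy has been placed in a dump region.
  void set_buffered_addr(address addr) {
    assert(should_copy() && _buffered_addr == nullptr && addr != nullptr);
    _buffered_addr = addr;
  }

  address buffered_addr() const { return _buffered_addr; }
};

int main() {
  unsigned char src[16], buffer[16];
  SourceObjInfo copied(src, make_a_copy);   // buffered_addr filled in later
  SourceObjInfo shared(src, point_to_it);   // buffered_addr == source_addr

  copied.set_buffered_addr(buffer);         // after the shallow copy is made
  printf("copied -> %p, shared -> %p\n",
         (void*)copied.buffered_addr(), (void*)shared.buffered_addr());
  return 0;
}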

213   ReservedSpace _shared_rs;
214   VirtualSpace _shared_vs;
215 
216   // The "pz" region is used only during static dumps to reserve an unused gap between SharedBaseAddress and
217   // the bottom of the rw region. At runtime, this gap is covered by a reserved area that disallows
218   // read/write/exec, so bad CompressedKlassPointers encodings are caught immediately.
219   // Note: this region does NOT exist in the CDS archive.
220   DumpRegion _pz_region;
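The comment above describes the runtime fate of the pz gap: it is re-reserved with no access rights so that a bad CompressedKlassPointers decode into that range faults immediately instead of reading garbage. Below is a conceptual POSIX sketch of the same idea, reserving a range and making its first part inaccessible with mprotect; HotSpot does this through its own os:: layer, and the sizes here are made up.

// Conceptual POSIX sketch (not HotSpot's os:: layer): reserve a mapping and
// turn its first part into a no-access "protection zone", so any stray
// dereference into that range faults instead of silently reading garbage.
#include <sys/mman.h>
#include <stdio.h>

int main() {
  const size_t page    = 4096;
  const size_t pz_size = 4 * page;        // size of the no-access zone (made up)
  const size_t total   = 64 * page;       // whole reserved range (made up)

  void* base = mmap(NULL, total, PROT_READ | PROT_WRITE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (base == MAP_FAILED) { perror("mmap"); return 1; }

  // Disallow read/write/exec on the zone at the bottom of the range;
  // touching it now raises SIGSEGV.
  if (mprotect(base, pz_size, PROT_NONE) != 0) { perror("mprotect"); return 1; }

  printf("reserved %zu bytes at %p, first %zu bytes are no-access\n",
         total, base, pz_size);
  munmap(base, total);
  return 0;
}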
221 
222   DumpRegion _rw_region;
223   DumpRegion _ro_region;
224   DumpRegion _ac_region; // AOT code
225 
226   // Combined bitmap to track pointers in both RW and RO regions. This is updated
227   // as objects are copied into RW and RO.
228   CHeapBitMap _ptrmap;
229 
230   // _ptrmap is split into these two bitmaps which are written into the archive.
231   CHeapBitMap _rw_ptrmap;   // marks pointers in the RW region
232   CHeapBitMap _ro_ptrmap;   // marks pointers in the RO region
233   CHeapBitMap _ac_ptrmap;   // marks pointers in the AC region
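The combined _ptrmap covers the whole dump buffer, while the archive stores one bitmap per region, so at write time the combined map has to be sliced at the region boundaries. A hedged sketch of that slicing follows; std::vector<bool> stands in for CHeapBitMap and the boundary offsets are invented for illustration.

// Illustrative only: slice one combined pointer bitmap into per-region
// bitmaps, assuming each bit stands for one pointer-sized word and the
// regions are laid out back to back (rw, then ro, then ac).
#include <vector>
#include <cstddef>

static std::vector<bool> slice(const std::vector<bool>& combined,
                               size_t start_bit, size_t end_bit) {
  return std::vector<bool>(combined.begin() + start_bit,
                           combined.begin() + end_bit);
}

int main() {
  std::vector<bool> ptrmap(96, false);   // combined map over the whole buffer
  ptrmap[3] = ptrmap[40] = ptrmap[90] = true;

  size_t rw_end = 32, ro_end = 80, ac_end = 96;   // hypothetical boundaries
  std::vector<bool> rw_ptrmap = slice(ptrmap, 0,      rw_end);
  std::vector<bool> ro_ptrmap = slice(ptrmap, rw_end, ro_end);
  std::vector<bool> ac_ptrmap = slice(ptrmap, ro_end, ac_end);
  // rw_ptrmap[3], ro_ptrmap[8] and ac_ptrmap[10] are now the set bits.
  return 0;
}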
234 
235   SourceObjList _rw_src_objs;                 // objs to put in rw region
236   SourceObjList _ro_src_objs;                 // objs to put in ro region
237   ResizeableResourceHashtable<address, SourceObjInfo, AnyObj::C_HEAP, mtClassShared> _src_obj_table;
238   ResizeableResourceHashtable<address, address, AnyObj::C_HEAP, mtClassShared> _buffered_to_src_table;
239   GrowableArray<Klass*>* _klasses;
240   GrowableArray<Symbol*>* _symbols;
241   unsigned int _entropy_seed;
242 
243   // statistics
244   DumpAllocStats _alloc_stats;
245   size_t _total_heap_region_size;
246   struct {
247     size_t _num_ptrs;
248     size_t _num_tagged_ptrs;
249     size_t _num_nulled_ptrs;
250   } _relocated_ptr_info;
251 
252   void print_region_stats(FileMapInfo *map_info, ArchiveHeapInfo* heap_info);
253   void print_bitmap_region_stats(size_t size, size_t total_size);

260   // Use this when you allocate space outside of ArchiveBuilder::dump_{rw,ro}_region.
261   // These are usually for misc tables that are allocated in the RO space.
262   class OtherROAllocMark {
263     char* _oldtop;
264   public:
265     OtherROAllocMark() {
266       _oldtop = _current->_ro_region.top();
267     }
268     ~OtherROAllocMark();
269   };
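A plausible usage pattern for OtherROAllocMark, inferred from the class shape only: the constructor snapshots the RO region's top so that whatever is allocated while the mark is live can be accounted for when it is destroyed. DumpRegion below is a toy stand-in for the real class, and the byte counts are illustrative.

// Usage sketch for a scoped RO-allocation mark, modeled on OtherROAllocMark.
#include <cstdio>
#include <cstddef>

struct DumpRegion {
  char  _buf[1024];
  char* _top = _buf;
  char* top() const { return _top; }
  char* allocate(size_t n) { char* p = _top; _top += n; return p; }
};

static DumpRegion ro_region;

class OtherROAllocMark {
  char* _oldtop;
public:
  OtherROAllocMark() : _oldtop(ro_region.top()) {}
  ~OtherROAllocMark() {
    printf("allocated %td bytes in ro outside dump_ro_region\n",
           ro_region.top() - _oldtop);
  }
};

int main() {
  OtherROAllocMark mark;        // e.g. around building a misc RO table
  ro_region.allocate(128);      // space claimed while the mark is live
  return 0;                     // destructor reports 128 bytes
}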
270 
271   void count_relocated_pointer(bool tagged, bool nulled);
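count_relocated_pointer(bool tagged, bool nulled) presumably just feeds the _relocated_ptr_info counters declared earlier; a guess at its body, based only on the field names, is shown below. The real implementation may differ.

// A guessed counter update, consistent with the _relocated_ptr_info fields.
#include <cstddef>

struct RelocatedPtrInfo {
  size_t _num_ptrs;
  size_t _num_tagged_ptrs;
  size_t _num_nulled_ptrs;
};

static RelocatedPtrInfo g_info = {0, 0, 0};

void count_relocated_pointer(bool tagged, bool nulled) {
  g_info._num_ptrs++;                     // every relocated pointer
  if (tagged) g_info._num_tagged_ptrs++;  // pointer carried a tag bit
  if (nulled) g_info._num_nulled_ptrs++;  // pointer was cleared during dump
}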
272 
273 private:
274   FollowMode get_follow_mode(MetaspaceClosure::Ref *ref);
275 
276   void iterate_sorted_roots(MetaspaceClosure* it);
277   void sort_klasses();
278   static int compare_symbols_by_address(Symbol** a, Symbol** b);
279   static int compare_klass_by_name(Klass** a, Klass** b);
280   void update_hidden_class_loader_type(InstanceKlass* ik) NOT_CDS_JAVA_HEAP_RETURN;
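iterate_sorted_roots(), sort_klasses() and the two comparators above exist so that the archive layout is deterministic rather than dependent on class-load order. The sketch below shows the same comparator shape (a function taking pointers to two array elements and returning a signed result) applied to a toy Klass with an std::string name; the real comparator works on Symbol*, not std::string.

// Stand-in sketch for deterministic ordering of archived classes by name.
#include <algorithm>
#include <string>
#include <vector>

struct Klass { std::string name; };

static int compare_klass_by_name(Klass* const* a, Klass* const* b) {
  return (*a)->name.compare((*b)->name);
}

int main() {
  Klass k1{"java/lang/String"}, k2{"java/lang/Object"};
  std::vector<Klass*> klasses = {&k1, &k2};
  std::sort(klasses.begin(), klasses.end(),
            [](Klass* a, Klass* b) { return compare_klass_by_name(&a, &b) < 0; });
  // klasses is now ordered: java/lang/Object, java/lang/String
  return 0;
}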
281 
282   void make_shallow_copies(DumpRegion *dump_region, const SourceObjList* src_objs);
283   void make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info);
284 
285   void relocate_embedded_pointers(SourceObjList* src_objs);
286 
287   bool is_excluded(Klass* k);
288   void clean_up_src_obj_table();
289 
290 protected:
291   virtual void iterate_roots(MetaspaceClosure* it) = 0;
292   void start_dump_region(DumpRegion* next);
293 
294 public:
295   address reserve_buffer();
296 
297   address buffer_bottom()                    const { return _buffer_bottom;                        }
298   address buffer_top()                       const { return (address)current_dump_region()->top(); }
299   address requested_static_archive_bottom()  const { return  _requested_static_archive_bottom;     }
300   address mapped_static_archive_bottom()     const { return  _mapped_static_archive_bottom;        }
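buffer_bottom(), requested_static_archive_bottom() and friends exist because objects are assembled at one address (the dump-time buffer) but written out as if they lived at another (the address the archive asks to be mapped at). The sketch below shows the single-offset translation this implies; the to_requested() helper and the constant-delta assumption are illustrative, not this header's API.

// Hedged sketch of "buffered -> requested" address translation.
#include <cstdint>
#include <cstdio>

typedef unsigned char* address;

struct Translator {
  address buffer_bottom;                     // where the buffer really is now
  address requested_static_archive_bottom;   // where the archive wants to map

  address to_requested(address buffered) const {
    return requested_static_archive_bottom + (buffered - buffer_bottom);
  }
};

int main() {
  Translator t = { (address)(uintptr_t)0x7f0000000000ULL,
                   (address)(uintptr_t)0x800000000ULL };
  address buffered = t.buffer_bottom + 0x1234;
  printf("buffered %p -> requested %p\n",
         (void*)buffered, (void*)t.to_requested(buffered));
  return 0;
}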