
src/hotspot/share/gc/shared/space.hpp

Old version of the affected region:

294 
295   CompactPoint(Generation* g = nullptr) :
296     gen(g), space(nullptr) {}
297 };
298 
299 class GenSpaceMangler;
300 
301 // A space in which the free area is contiguous.  It therefore supports
302 // faster allocation, and compaction.
303 class ContiguousSpace: public Space {
304   friend class VMStructs;
305 
306 private:
307   HeapWord* _compaction_top;
308   ContiguousSpace* _next_compaction_space;
309 
310   static inline void verify_up_to_first_dead(ContiguousSpace* space) NOT_DEBUG_RETURN;
311 
312   static inline void clear_empty_region(ContiguousSpace* space);
313 
314  protected:
315   HeapWord* _top;
316   // A helper for mangling the unused area of the space in debug builds.
317   GenSpaceMangler* _mangler;
318 
319   // Used during compaction.
320   HeapWord* _first_dead;
321   HeapWord* _end_of_live;
322 
323   // This is the function to invoke when an allocation of an object covering
324   // "start" to "end" occurs to update other internal data structures.
325   virtual void alloc_block(HeapWord* start, HeapWord* the_end) { }
326 
327   GenSpaceMangler* mangler() { return _mangler; }
328 
329   // Allocation helpers (return null if full).
330   inline HeapWord* allocate_impl(size_t word_size);
331   inline HeapWord* par_allocate_impl(size_t word_size);
332 
333  public:
334   ContiguousSpace();

381   // The maximum percentage of objects that can be dead in the compacted
382   // live part of a compacted space ("deadwood" support.)
383   virtual size_t allowed_dead_ratio() const { return 0; };
384 
385   // Some contiguous spaces may maintain some data structures that should
386   // be updated whenever an allocation crosses a boundary.  This function
387   // initializes these data structures for further updates.
388   virtual void initialize_threshold() { }
389 
390   // "q" is an object of the given "size" that should be forwarded;
391   // "cp" names the generation ("gen") and containing "this" (which must
392   // also equal "cp->space").  "compact_top" is where in "this" the
393   // next object should be forwarded to.  If there is room in "this" for
394   // the object, insert an appropriate forwarding pointer in "q".
395   // If not, go to the next compaction space (there must
396   // be one, since compaction must succeed -- we go to the first space of
397   // the previous generation if necessary, updating "cp"), reset compact_top
398   // and then forward.  In either case, returns the new value of "compact_top".
399   // Invokes the "alloc_block" function of the then-current compaction
400   // space.
401   virtual HeapWord* forward(oop q, size_t size, CompactPoint* cp,
402                     HeapWord* compact_top);
403 
404   // Accessors
405   HeapWord* top() const            { return _top;    }
406   void set_top(HeapWord* value)    { _top = value; }
407 
408   void set_saved_mark()            { _saved_mark_word = top();    }
409 
410   bool saved_mark_at_top() const { return saved_mark_word() == top(); }
411 
412   // In debug mode, mangle (overwrite with a particular bit
413   // pattern) the unused part of a space.
414 
415   // Used to save the address in a space for later use during mangling.
416   void set_top_for_allocations(HeapWord* v) PRODUCT_RETURN;
417   // Used to save the space's current top for later use during mangling.
418   void set_top_for_allocations() PRODUCT_RETURN;
419 
420   // Mangle regions in the space from the current top up to the
421   // previously mangled part of the space.

New version of the affected region:
294 
295   CompactPoint(Generation* g = nullptr) :
296     gen(g), space(nullptr) {}
297 };
298 
299 class GenSpaceMangler;
300 
301 // A space in which the free area is contiguous.  It therefore supports
302 // faster allocation, and compaction.
303 class ContiguousSpace: public Space {
304   friend class VMStructs;
305 
306 private:
307   HeapWord* _compaction_top;
308   ContiguousSpace* _next_compaction_space;
309 
310   static inline void verify_up_to_first_dead(ContiguousSpace* space) NOT_DEBUG_RETURN;
311 
312   static inline void clear_empty_region(ContiguousSpace* space);
313 
314 #if INCLUDE_SERIALGC
315   template <bool ALT_FWD>
316   void prepare_for_compaction_impl(CompactPoint* cp);
317 
318   template <bool ALT_FWD>
319   void adjust_pointers_impl();
320 
321   template <bool ALT_FWD>
322   void compact_impl();
323 #endif
324 
325 protected:
326   HeapWord* _top;
327   // A helper for mangling the unused area of the space in debug builds.
328   GenSpaceMangler* _mangler;
329 
330   // Used during compaction.
331   HeapWord* _first_dead;
332   HeapWord* _end_of_live;
333 
334   // This is the function to invoke when an allocation of an object covering
335   // "start" to "end" occurs to update other internal data structures.
336   virtual void alloc_block(HeapWord* start, HeapWord* the_end) { }
337 
338   GenSpaceMangler* mangler() { return _mangler; }
339 
340   // Allocation helpers (return null if full).
341   inline HeapWord* allocate_impl(size_t word_size);
342   inline HeapWord* par_allocate_impl(size_t word_size);
343 
344  public:
345   ContiguousSpace();

392   // The maximum percentage of objects that can be dead in the compacted
393   // live part of a compacted space ("deadwood" support.)
394   virtual size_t allowed_dead_ratio() const { return 0; };
395 
396   // Some contiguous spaces may maintain some data structures that should
397   // be updated whenever an allocation crosses a boundary.  This function
398   // initializes these data structures for further updates.
399   virtual void initialize_threshold() { }
400 
401   // "q" is an object of the given "size" that should be forwarded;
402   // "cp" names the generation ("gen") and containing "this" (which must
403   // also equal "cp->space").  "compact_top" is where in "this" the
404   // next object should be forwarded to.  If there is room in "this" for
405   // the object, insert an appropriate forwarding pointer in "q".
406   // If not, go to the next compaction space (there must
407   // be one, since compaction must succeed -- we go to the first space of
408   // the previous generation if necessary, updating "cp"), reset compact_top
409   // and then forward.  In either case, returns the new value of "compact_top".
410   // Invokes the "alloc_block" function of the then-current compaction
411   // space.
412   template <bool ALT_FWD>
413   HeapWord* forward(oop q, size_t size, CompactPoint* cp,
414                     HeapWord* compact_top);
415 
416   // Accessors
417   HeapWord* top() const            { return _top;    }
418   void set_top(HeapWord* value)    { _top = value; }
419 
420   void set_saved_mark()            { _saved_mark_word = top();    }
421 
422   bool saved_mark_at_top() const { return saved_mark_word() == top(); }
423 
424   // In debug mode, mangle (overwrite with a particular bit
425   // pattern) the unused part of a space.
426 
427   // Used to save the address in a space for later use during mangling.
428   void set_top_for_allocations(HeapWord* v) PRODUCT_RETURN;
429   // Used to save the space's current top for later use during mangling.
430   void set_top_for_allocations() PRODUCT_RETURN;
431 
432   // Mangle regions in the space from the current top up to the
433   // previously mangled part of the space.
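
The allocate_impl/par_allocate_impl declarations above promise bump-pointer allocation that returns null when the space is full. Below is a minimal sketch of that contract, assuming only top(), end(), set_top() and the standard pointer_delta()/Atomic::cmpxchg() utilities; the real inline definitions live outside this excerpt and carry additional asserts and alignment checks.

HeapWord* ContiguousSpace::allocate_impl(size_t word_size) {
  HeapWord* obj = top();
  if (pointer_delta(end(), obj) >= word_size) {   // enough contiguous room?
    set_top(obj + word_size);                     // bump the top pointer
    return obj;
  }
  return nullptr;                                 // space is full
}

HeapWord* ContiguousSpace::par_allocate_impl(size_t word_size) {
  do {
    HeapWord* obj = top();
    if (pointer_delta(end(), obj) < word_size) {
      return nullptr;                             // space is full
    }
    HeapWord* new_top = obj + word_size;
    // Lock-free bump: retry if another thread advanced _top concurrently.
    if (Atomic::cmpxchg(&_top, obj, new_top) == obj) {
      return obj;
    }
  } while (true);
}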
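
allowed_dead_ratio() caps how much dead space may be left un-compacted inside the live part of the space ("deadwood"). A sketch of how a collector could turn that percentage into a word budget, assuming capacity() reports bytes as in the base Space class, hence the division by HeapWordSize; the helper name is purely illustrative.

size_t allowed_deadspace_words(const ContiguousSpace* space) {
  const size_t ratio = space->allowed_dead_ratio();        // 0 means "no deadwood"
  return (space->capacity() * ratio / 100) / HeapWordSize; // budget in heap words
}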
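
The comment on forward() describes the forwarding step of sliding compaction. The following is a condensed sketch of the classic (non-ALT_FWD) path it describes, not the actual implementation; accessors such as set_compaction_top(), next_compaction_space(), GenCollectedHeap::heap()->young_gen() and first_compaction_space() are assumed from the surrounding sources and are not shown in this excerpt. The templated ALT_FWD variant would additionally route the forwarding store through the alternative encoding.

HeapWord* ContiguousSpace::forward(oop q, size_t size, CompactPoint* cp,
                                   HeapWord* compact_top) {
  assert(cp->space == this, "'this' must be the current compaction space");

  // If the object does not fit, advance the compaction point to the next
  // compaction space, falling back to the first space of the previous
  // generation, exactly as the header comment describes.
  while (pointer_delta(cp->space->end(), compact_top) < size) {
    cp->space->set_compaction_top(compact_top);       // remember how far we got
    cp->space = cp->space->next_compaction_space();
    if (cp->space == nullptr) {
      cp->gen   = GenCollectedHeap::heap()->young_gen();   // assumed heap accessor
      cp->space = cp->gen->first_compaction_space();
    }
    compact_top = cp->space->bottom();
    cp->space->set_compaction_top(compact_top);
    cp->space->initialize_threshold();                // reset boundary bookkeeping
  }

  // Record the destination in q's mark word; an object that is already at
  // its destination just keeps a default mark.
  if (cast_from_oop<HeapWord*>(q) != compact_top) {
    q->forward_to(cast_to_oop(compact_top));
  } else {
    q->init_mark();
  }
  compact_top += size;

  // Notify the space so boundary-crossing structures (e.g. a block offset
  // table) stay in sync with the forwarded layout.
  cp->space->alloc_block(compact_top - size, compact_top);
  return compact_top;
}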
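
The new prepare_for_compaction_impl / adjust_pointers_impl / compact_impl templates exist so each compaction phase can be instantiated for both forwarding encodings. One plausible dispatch shape is sketched below; UseAltGCForwarding is a purely hypothetical stand-in for whatever condition actually selects the alternative encoding and is not taken from this excerpt.

#if INCLUDE_SERIALGC
void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
  if (UseAltGCForwarding) {                    // hypothetical selector
    prepare_for_compaction_impl<true>(cp);     // alternative forwarding encoding
  } else {
    prepare_for_compaction_impl<false>(cp);    // classic mark-word forwarding
  }
}
#endif // INCLUDE_SERIALGC

adjust_pointers() and compact() would presumably dispatch to their <true>/<false> instantiations in the same way.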
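
The mangling declarations at the end of the listing are debug-only aids. A sketch of the effect they ultimately have: fill the unused words between top() and end() with a recognizable pattern so stray reads of uninitialized heap memory stand out. Copy::fill_to_words() and badHeapWordVal are the usual HotSpot utilities for this; the real work goes through the GenSpaceMangler helper declared above, and the function name here is illustrative only.

#ifdef ASSERT
static void mangle_unused_area_sketch(ContiguousSpace* space) {
  Copy::fill_to_words(space->top(),
                      pointer_delta(space->end(), space->top()),
                      badHeapWordVal);   // debug fill pattern
}
#endif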