< prev index next >

src/hotspot/share/gc/shared/space.hpp

Print this page

414 
415   // Some contiguous spaces may maintain data structures that must be
416   // updated whenever an allocation crosses a boundary ("threshold").
417   // This function returns the first such boundary; cross_threshold()
418   // then supplies each subsequent one.  (The default implementation
419   // returns the end of the space, so the boundary is never crossed.)
420   virtual HeapWord* initialize_threshold() { return end(); }
421 
422   // "q" is an object of the given "size" that should be forwarded;
423   // "cp" names the generation ("gen") and containing "this" (which must
424   // also equal "cp->space").  "compact_top" is where in "this" the
425   // next object should be forwarded to.  If there is room in "this" for
426   // the object, insert an appropriate forwarding pointer in "q".
427   // If not, go to the next compaction space (there must
428   // be one, since compaction must succeed -- we go to the first space of
429   // the previous generation if necessary, updating "cp"), reset compact_top
430   // and then forward.  In either case, returns the new value of "compact_top".
431   // If the forwarding crosses "cp->threshold", invokes the "cross_threshold"
432   // function of the then-current compaction space, and updates "cp->threshold"
433   // accordingly.
434   virtual HeapWord* forward(oop q, size_t size, CompactPoint* cp,

435                     HeapWord* compact_top);
436 
437   // Return "size" adjusted as required by this space (identity by default).
438   virtual size_t adjust_object_size_v(size_t size) const { return size; }
439 
440   void set_first_dead(HeapWord* value) { _first_dead = value; }   // compaction bookkeeping (see _first_dead)
441   void set_end_of_live(HeapWord* value) { _end_of_live = value; } // compaction bookkeeping (see _end_of_live)
442 
443 protected:
444   // Used during compaction.
445   HeapWord* _first_dead;
446   HeapWord* _end_of_live;
447 
448   // This function is invoked when an allocation of an object covering
449   // "start" to "end" crosses the threshold; returns the next
450   // threshold.  (The default implementation simply returns end().)
451   virtual HeapWord* cross_threshold(HeapWord* start, HeapWord* the_end) {
452     return end();
453   }
454 
455   // Below are template functions for scan_and_* algorithms (avoiding virtual calls).
456   // The space argument should be a subclass of CompactibleSpace, implementing
457   // scan_limit(), scanned_block_is_obj(), and scanned_block_size(),
458   // and possibly also overriding obj_size(), and adjust_obj_size().
459   // These functions should avoid virtual calls whenever possible.
460 
461 #if INCLUDE_SERIALGC
462   // Frequently calls adjust_obj_size().
463   template <class SpaceType>
464   static inline void scan_and_adjust_pointers(SpaceType* space);
465 #endif
466 
467   // Frequently calls obj_size().
468   template <class SpaceType>
469   static inline void scan_and_compact(SpaceType* space);
470 
471   // Frequently calls scanned_block_is_obj() and scanned_block_size().
472   // Requires the scan_limit() function.
473   template <class SpaceType>
474   static inline void scan_and_forward(SpaceType* space, CompactPoint* cp);
475 };
476 
477 class GenSpaceMangler;
478 
479 // A space in which the free area is contiguous.  It therefore supports
480 // faster allocation, and compaction.
481 class ContiguousSpace: public CompactibleSpace {
482   friend class VMStructs;
483   // Allow scan_and_forward function to call (private) overrides for auxiliary functions on this class
484   template <typename SpaceType>
485   friend void CompactibleSpace::scan_and_forward(SpaceType* space, CompactPoint* cp);
486 
487  private:
488   // Auxiliary functions for scan_and_forward support.
489   // See comments for CompactibleSpace for more information.
490   inline HeapWord* scan_limit() const {
491     return top();  // scanning stops at the allocation top
492   }
493 
494   inline bool scanned_block_is_obj(const HeapWord* addr) const {
495     return true; // Always true: everything below scan_limit() (== top()) is an object
496   }
497 
498   inline size_t scanned_block_size(const HeapWord* addr) const;
499 
500  protected:
501   HeapWord* _top;
502   // A helper for mangling the unused area of the space in debug builds.
503   GenSpaceMangler* _mangler;
504 

414 
415   // Some contiguous spaces may maintain data structures that must be
416   // updated whenever an allocation crosses a boundary ("threshold").
417   // This function returns the first such boundary; cross_threshold()
418   // then supplies each subsequent one.  (The default implementation
419   // returns the end of the space, so the boundary is never crossed.)
420   virtual HeapWord* initialize_threshold() { return end(); }
421 
422   // "q" is an object of the given "size" that should be forwarded;
423   // "cp" names the generation ("gen") and containing "this" (which must
424   // also equal "cp->space").  "compact_top" is where in "this" the
425   // next object should be forwarded to.  If there is room in "this" for
426   // the object, insert an appropriate forwarding pointer in "q".
427   // If not, go to the next compaction space (there must
428   // be one, since compaction must succeed -- we go to the first space of
429   // the previous generation if necessary, updating "cp"), reset compact_top
430   // and then forward.  In either case, returns the new value of "compact_top".
431   // If the forwarding crosses "cp->threshold", invokes the "cross_threshold"
432   // function of the then-current compaction space, and updates "cp->threshold"
433   // accordingly.
434   template <bool ALT_FWD>
435   HeapWord* forward(oop q, size_t size, CompactPoint* cp,
436                     HeapWord* compact_top);
437 
438   // Return "size" adjusted as required by this space (identity by default).
439   virtual size_t adjust_object_size_v(size_t size) const { return size; }
440 
441   void set_first_dead(HeapWord* value) { _first_dead = value; }   // compaction bookkeeping (see _first_dead)
442   void set_end_of_live(HeapWord* value) { _end_of_live = value; } // compaction bookkeeping (see _end_of_live)
443 
444 protected:
445   // Used during compaction.
446   HeapWord* _first_dead;
447   HeapWord* _end_of_live;
448 
449   // This function is invoked when an allocation of an object covering
450   // "start" to "end" crosses the threshold; returns the next
451   // threshold.  (The default implementation simply returns end().)
452   virtual HeapWord* cross_threshold(HeapWord* start, HeapWord* the_end) {
453     return end();
454   }
455 
456   // Below are template functions for scan_and_* algorithms (avoiding virtual calls).
457   // The space argument should be a subclass of CompactibleSpace, implementing
458   // scan_limit(), scanned_block_is_obj(), and scanned_block_size(),
459   // and possibly also overriding obj_size(), and adjust_obj_size().
460   // These functions should avoid virtual calls whenever possible.
461 
462 #if INCLUDE_SERIALGC
463   // Frequently calls adjust_obj_size().
464   template <bool ALT_FWD, class SpaceType>
465   static inline void scan_and_adjust_pointers(SpaceType* space);
466 #endif
467 
468   // Frequently calls obj_size().
469   template <bool ALT_FWD, class SpaceType>
470   static inline void scan_and_compact(SpaceType* space);
471 
472   // Frequently calls scanned_block_is_obj() and scanned_block_size().
473   // Requires the scan_limit() function.
474   template <bool ALT_FWD, class SpaceType>
475   static inline void scan_and_forward(SpaceType* space, CompactPoint* cp);
476 };
477 
478 class GenSpaceMangler;
479 
480 // A space in which the free area is contiguous.  It therefore supports
481 // faster allocation, and compaction.
482 class ContiguousSpace: public CompactibleSpace {
483   friend class VMStructs;
484   // Allow scan_and_forward function to call (private) overrides for auxiliary functions on this class
485   template <bool ALT_FWD, typename SpaceType>
486   friend void CompactibleSpace::scan_and_forward(SpaceType* space, CompactPoint* cp);
487 
488  private:
489   // Auxiliary functions for scan_and_forward support.
490   // See comments for CompactibleSpace for more information.
491   inline HeapWord* scan_limit() const {
492     return top();  // scanning stops at the allocation top
493   }
494 
495   inline bool scanned_block_is_obj(const HeapWord* addr) const {
496     return true; // Always true: everything below scan_limit() (== top()) is an object
497   }
498 
499   inline size_t scanned_block_size(const HeapWord* addr) const;
500 
501  protected:
502   HeapWord* _top;
503   // A helper for mangling the unused area of the space in debug builds.
504   GenSpaceMangler* _mangler;
505 
< prev index next >