src/hotspot/share/gc/shenandoah/shenandoahFreeSet.hpp

 32 private:
 33   ShenandoahHeap* const _heap;
 34   CHeapBitMap _mutator_free_bitmap;
 35   CHeapBitMap _collector_free_bitmap;
 36   size_t _max;
 37 
 38   // Left-most and right-most region indexes. There are no free regions outside
 39   // of [left-most; right-most] index intervals
 40   size_t _mutator_leftmost, _mutator_rightmost;
 41   size_t _collector_leftmost, _collector_rightmost;
 42 
 43   size_t _capacity;
 44   size_t _used;
 45 
 46   void assert_bounds() const NOT_DEBUG_RETURN;
 47 
 48   bool is_mutator_free(size_t idx) const;
 49   bool is_collector_free(size_t idx) const;
 50 
 51   HeapWord* try_allocate_in(ShenandoahHeapRegion* region, ShenandoahAllocRequest& req, bool& in_new_region);
 52   HeapWord* allocate_single(ShenandoahAllocRequest& req, bool& in_new_region);
 53   HeapWord* allocate_contiguous(ShenandoahAllocRequest& req);
 54 
 55   void flip_to_gc(ShenandoahHeapRegion* r);
 56 
 57   void recompute_bounds();
 58   void adjust_bounds();
 59   bool touches_bounds(size_t num) const;
 60 
 61   void increase_used(size_t amount);
 62   void clear_internal();
 63 
 64   size_t collector_count() const { return _collector_free_bitmap.count_one_bits(); }
 65   size_t mutator_count()   const { return _mutator_free_bitmap.count_one_bits();   }
 66 
 67   void try_recycle_trashed(ShenandoahHeapRegion *r);
 68 
 69   bool can_allocate_from(ShenandoahHeapRegion *r);
 70   size_t alloc_capacity(ShenandoahHeapRegion *r);
 71   bool has_no_alloc_capacity(ShenandoahHeapRegion *r);

 32 private:
 33   ShenandoahHeap* const _heap;
 34   CHeapBitMap _mutator_free_bitmap;
 35   CHeapBitMap _collector_free_bitmap;
 36   size_t _max;
 37 
 38   // Left-most and right-most region indexes. There are no free regions outside
 39   // of [left-most; right-most] index intervals
 40   size_t _mutator_leftmost, _mutator_rightmost;
 41   size_t _collector_leftmost, _collector_rightmost;
 42 
 43   size_t _capacity;
 44   size_t _used;
 45 
 46   void assert_bounds() const NOT_DEBUG_RETURN;
 47 
 48   bool is_mutator_free(size_t idx) const;
 49   bool is_collector_free(size_t idx) const;
 50 
 51   HeapWord* try_allocate_in(ShenandoahHeapRegion* region, ShenandoahAllocRequest& req, bool& in_new_region);
 52   HeapWord* allocate_with_affiliation(ShenandoahRegionAffiliation affiliation, ShenandoahAllocRequest& req, bool& in_new_region);
 53 
 54   // While holding the heap lock, allocate memory for a single object that must be entirely contained
 55   // within a single ShenandoahHeapRegion, as characterized by req.  The req.size() value is known to be
 56   // less than or equal to ShenandoahHeapRegion::humongous_threshold_words().  The caller of allocate_single
 57   // is responsible for registering the resulting object and setting the remembered set card values as
 58   // appropriate.  The most common case is that we are allocating a PLAB, in which case object registration
 59   // and card dirtying are managed after the PLAB is divided into individual objects.
 60   HeapWord* allocate_single(ShenandoahAllocRequest& req, bool& in_new_region);
 61   HeapWord* allocate_contiguous(ShenandoahAllocRequest& req);
 62 
 63   void flip_to_gc(ShenandoahHeapRegion* r);
 64 
 65   void recompute_bounds();
 66   void adjust_bounds();
 67   bool touches_bounds(size_t num) const;
 68 
 69   void increase_used(size_t amount);
 70   void clear_internal();
 71 
 72   size_t collector_count() const { return _collector_free_bitmap.count_one_bits(); }
 73   size_t mutator_count()   const { return _mutator_free_bitmap.count_one_bits();   }
 74 
 75   void try_recycle_trashed(ShenandoahHeapRegion *r);
 76 
 77   bool can_allocate_from(ShenandoahHeapRegion *r);
 78   size_t alloc_capacity(ShenandoahHeapRegion *r);
 79   bool has_no_alloc_capacity(ShenandoahHeapRegion *r);
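
The comment block above allocate_single describes a contract that rests with the caller rather than anything the free set enforces. Purely as a hedged illustration of that contract, the sketch below shows the shape a non-PLAB caller might take; allocate_plain_object() is not part of the class, and register_object() and dirty_cards_for() are hypothetical placeholder names for the object-registration and remembered-set card-marking steps, not actual Shenandoah APIs.

#include "gc/shenandoah/shenandoahFreeSet.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.hpp"

// Hypothetical sketch only: a member-style caller honoring the allocate_single
// contract described in the comment above.  allocate_plain_object(),
// register_object() and dirty_cards_for() are placeholders, not real APIs.
HeapWord* ShenandoahFreeSet::allocate_plain_object(ShenandoahAllocRequest& req) {
  assert(req.size() <= ShenandoahHeapRegion::humongous_threshold_words(),
         "allocate_single only serves non-humongous requests");
  bool in_new_region = false;
  HeapWord* obj;
  {
    // The allocation itself happens while the heap lock is held.
    ShenandoahHeapLocker locker(_heap->lock());
    obj = allocate_single(req, in_new_region);
  }
  if (obj != NULL && !req.is_lab_alloc()) {
    // A plain (non-PLAB) allocation registers the object and dirties its cards
    // right away; a PLAB defers both until it is divided into individual objects.
    register_object(obj, req.size());     // placeholder
    dirty_cards_for(obj, req.size());     // placeholder
  }
  return obj;
}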