
src/hotspot/share/gc/g1/heapRegion.hpp


Old version:

128   void reset_after_full_gc_common();
129 
130   void clear(bool mangle_space);
131 
132   HeapWord* block_start_const(const void* p) const;
133 
134   void mangle_unused_area() PRODUCT_RETURN;
135 
136   // Try to allocate at least min_word_size and up to desired_size from this region.
137   // Returns NULL if not possible, otherwise sets actual_word_size to the amount of
138   // space allocated.
139   // This version assumes that all allocation requests to this HeapRegion are properly
140   // synchronized.
141   inline HeapWord* allocate_impl(size_t min_word_size, size_t desired_word_size, size_t* actual_word_size);
142   // Try to allocate at least min_word_size and up to desired_size from this HeapRegion.
143   // Returns NULL if not possible, otherwise sets actual_word_size to the amount of
144   // space allocated.
145   // This version synchronizes with other calls to par_allocate_impl().
146   inline HeapWord* par_allocate_impl(size_t min_word_size, size_t desired_word_size, size_t* actual_word_size);
147 
148 public:
149   HeapWord* block_start(const void* p);
150 
151   void object_iterate(ObjectClosure* blk);
152 
153   // Allocation (return NULL if full).  Assumes the caller has established
154   // mutually exclusive access to the HeapRegion.
155   HeapWord* allocate(size_t min_word_size, size_t desired_word_size, size_t* actual_word_size);
156   // Allocation (return NULL if full).  Enforces mutual exclusion internally.
157   HeapWord* par_allocate(size_t min_word_size, size_t desired_word_size, size_t* actual_word_size);
158 
159   HeapWord* allocate(size_t word_size);
160   HeapWord* par_allocate(size_t word_size);
161 
162   inline HeapWord* par_allocate_no_bot_updates(size_t min_word_size, size_t desired_word_size, size_t* word_size);
163   inline HeapWord* allocate_no_bot_updates(size_t word_size);
164   inline HeapWord* allocate_no_bot_updates(size_t min_word_size, size_t desired_word_size, size_t* actual_size);
165 
166   // Full GC support methods.
167 
168   HeapWord* initialize_threshold();
169   HeapWord* cross_threshold(HeapWord* start, HeapWord* end);
170 
171   // Update heap region that has been compacted to be consistent after Full GC.
172   void reset_compacted_after_full_gc();
173   // Update skip-compacting heap region to be consistent after Full GC.
174   void reset_skip_compacting_after_full_gc();
175 
176   // All allocated blocks are occupied by objects in a HeapRegion
177   bool block_is_obj(const HeapWord* p) const;
178 
179   // Returns whether the given object is dead based on TAMS and bitmap.
180   // An object is dead iff a) it was not allocated since the last mark (>TAMS), b) it
181   // is not marked (bitmap).
182   bool is_obj_dead(const oop obj, const G1CMBitMap* const prev_bitmap) const;
183 
184   // Returns the object size for all valid block starts
185   // and the amount of unallocated words if called on top()
186   size_t block_size(const HeapWord* p) const;
187 
188   // Scans through the region using the bitmap to determine what
189   // objects to call size_t ApplyToMarkedClosure::apply(oop) for.
190   template<typename ApplyToMarkedClosure>
191   inline void apply_to_marked_objects(G1CMBitMap* bitmap, ApplyToMarkedClosure* closure);
192 
193   void reset_bot() {
194     _bot_part.reset_bot();
195   }
196 
197   void update_bot() {
198     _bot_part.update();
199   }
200 
201 private:
202   // The remembered set for this region.
203   HeapRegionRemSet* _rem_set;
204 
205   // Cached index of this region in the heap region sequence.

New version:

128   void reset_after_full_gc_common();
129 
130   void clear(bool mangle_space);
131 
132   HeapWord* block_start_const(const void* p) const;
133 
134   void mangle_unused_area() PRODUCT_RETURN;
135 
136   // Try to allocate at least min_word_size and up to desired_size from this region.
137   // Returns NULL if not possible, otherwise sets actual_word_size to the amount of
138   // space allocated.
139   // This version assumes that all allocation requests to this HeapRegion are properly
140   // synchronized.
141   inline HeapWord* allocate_impl(size_t min_word_size, size_t desired_word_size, size_t* actual_word_size);
142   // Try to allocate at least min_word_size and up to desired_size from this HeapRegion.
143   // Returns NULL if not possible, otherwise sets actual_word_size to the amount of
144   // space allocated.
145   // This version synchronizes with other calls to par_allocate_impl().
146   inline HeapWord* par_allocate_impl(size_t min_word_size, size_t desired_word_size, size_t* actual_word_size);
147 
148   template<bool RESOLVE>
149   void object_iterate_impl(ObjectClosure* blk);
150 
151 public:
152   HeapWord* block_start(const void* p);
153 
154   void object_iterate(ObjectClosure* blk);
155 
156   // Allocation (return NULL if full).  Assumes the caller has established
157   // mutually exclusive access to the HeapRegion.
158   HeapWord* allocate(size_t min_word_size, size_t desired_word_size, size_t* actual_word_size);
159   // Allocation (return NULL if full).  Enforces mutual exclusion internally.
160   HeapWord* par_allocate(size_t min_word_size, size_t desired_word_size, size_t* actual_word_size);
161 
162   HeapWord* allocate(size_t word_size);
163   HeapWord* par_allocate(size_t word_size);
164 
165   inline HeapWord* par_allocate_no_bot_updates(size_t min_word_size, size_t desired_word_size, size_t* word_size);
166   inline HeapWord* allocate_no_bot_updates(size_t word_size);
167   inline HeapWord* allocate_no_bot_updates(size_t min_word_size, size_t desired_word_size, size_t* actual_size);
168 
169   // Full GC support methods.
170 
171   HeapWord* initialize_threshold();
172   HeapWord* cross_threshold(HeapWord* start, HeapWord* end);
173 
174   // Update heap region that has been compacted to be consistent after Full GC.
175   void reset_compacted_after_full_gc();
176   // Update skip-compacting heap region to be consistent after Full GC.
177   void reset_skip_compacting_after_full_gc();
178 
179   // All allocated blocks are occupied by objects in a HeapRegion
180   bool block_is_obj(const HeapWord* p) const;
181 
182   // Returns whether the given object is dead based on TAMS and bitmap.
183   // An object is dead iff a) it was not allocated since the last mark (>TAMS), b) it
184   // is not marked (bitmap).
185   bool is_obj_dead(const oop obj, const G1CMBitMap* const prev_bitmap) const;
186 
187   // Returns the object size for all valid block starts
188   // and the amount of unallocated words if called on top()
189   template<bool RESOLVE = false>
190   size_t block_size(const HeapWord* p) const;
191 
192   // Scans through the region using the bitmap to determine what
193   // objects to call size_t ApplyToMarkedClosure::apply(oop) for.
194   template<typename ApplyToMarkedClosure>
195   inline void apply_to_marked_objects(G1CMBitMap* bitmap, ApplyToMarkedClosure* closure);
196 
197   void reset_bot() {
198     _bot_part.reset_bot();
199   }
200 
201   void update_bot() {
202     _bot_part.update();
203   }
204 
205 private:
206   // The remembered set for this region.
207   HeapRegionRemSet* _rem_set;
208 
209   // Cached index of this region in the heap region sequence.
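
A note on the allocation contract documented above (old lines 136-146 and 153-160, unchanged in the new version): allocate()/par_allocate() try to hand out at least min_word_size and at most desired_word_size words, report the granted amount through actual_word_size, and return NULL when even the minimum no longer fits; the par_* variants only need to synchronize with each other. The following is a small self-contained model of that contract, not HotSpot code: RegionModel, capacity_words and the bool-plus-offset return shape are invented for illustration (the real methods return a HeapWord* or NULL), and the CAS-on-top loop merely shows one common way "synchronizes with other calls to par_allocate_impl()" can be realized.

    #include <atomic>
    #include <cstddef>
    #include <cstdio>

    // Model only: a "region" is just a word counter with a fixed capacity.
    struct RegionModel {
      static constexpr size_t capacity_words = 1024;
      std::atomic<size_t> _top{0};   // index of the first unallocated word

      // Mirrors the single-threaded contract of allocate_impl(): the caller is
      // assumed to have exclusive access to this region.
      bool allocate_impl(size_t min_words, size_t desired_words,
                         size_t* actual_words, size_t* offset) {
        size_t cur = _top.load(std::memory_order_relaxed);
        size_t available = capacity_words - cur;
        if (available < min_words) {
          return false;                     // the real code would return NULL here
        }
        size_t granted = available < desired_words ? available : desired_words;
        _top.store(cur + granted, std::memory_order_relaxed);
        *actual_words = granted;
        *offset = cur;
        return true;
      }

      // Mirrors par_allocate_impl(): concurrent callers race on _top with a CAS
      // loop, so they only need to synchronize with one another.
      bool par_allocate_impl(size_t min_words, size_t desired_words,
                             size_t* actual_words, size_t* offset) {
        size_t cur = _top.load(std::memory_order_relaxed);
        for (;;) {
          size_t available = capacity_words - cur;
          if (available < min_words) {
            return false;
          }
          size_t granted = available < desired_words ? available : desired_words;
          if (_top.compare_exchange_weak(cur, cur + granted)) {
            *actual_words = granted;
            *offset = cur;
            return true;
          }
          // CAS failed: 'cur' now holds the updated _top; retry with it.
        }
      }
    };

    int main() {
      RegionModel r;
      size_t actual = 0, offset = 0;
      while (r.par_allocate_impl(16, 100, &actual, &offset)) {
        std::printf("granted %zu words at word offset %zu\n", actual, offset);
      }
      return 0;
    }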
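The comment on is_obj_dead() (old lines 179-182, new lines 182-185) packs the liveness rule into one sentence: an object is dead iff it was already allocated when the last marking started (it lies below TAMS, the top-at-mark-start pointer) and the marking bitmap carries no mark for it. Below is a throwaway model of just that predicate, with an invented one-bit-per-word bitmap stand-in (MarkBitmapModel) and word indices instead of real HeapWord* values; the exact >/>= treatment of the TAMS boundary is an implementation detail not visible in this header.

    #include <cstdint>
    #include <cstdio>

    // Invented one-bit-per-word stand-in for the marking bitmap.
    struct MarkBitmapModel {
      uint64_t bits = 0;
      bool is_marked(size_t word_index) const { return (bits >> word_index) & 1u; }
    };

    // Restates the comment: dead iff the object lies below TAMS (so it already
    // existed when the last marking started) and the bitmap does not mark it.
    bool is_obj_dead_model(size_t obj_word, size_t tams_word, const MarkBitmapModel& bm) {
      bool allocated_since_mark = obj_word >= tams_word;   // ">TAMS" in the comment
      return !allocated_since_mark && !bm.is_marked(obj_word);
    }

    int main() {
      MarkBitmapModel bm;
      bm.bits = 0b0100;                    // only word 2 is marked
      size_t tams = 5;
      std::printf("word 2 dead? %d\n", is_obj_dead_model(2, tams, bm));  // 0: marked
      std::printf("word 3 dead? %d\n", is_obj_dead_model(3, tams, bm));  // 1: below TAMS, unmarked
      std::printf("word 7 dead? %d\n", is_obj_dead_model(7, tams, bm));  // 0: allocated after mark start
      return 0;
    }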
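The substance of this change is the new private template<bool RESOLVE> object_iterate_impl() (new lines 148-149) and the template<bool RESOLVE = false> added in front of block_size() (new line 189). Only the declarations are visible on this page; the bodies live in heapRegion.cpp / heapRegion.inline.hpp, which are not part of this hunk. Purely as orientation, a split like this usually lets the public object_iterate() choose a variant while the RESOLVE = false default keeps every existing block_size(p) call meaning block_size<false>(p). The sketch below is speculative: the dispatch condition is invented, and the loop is only the conventional bottom()-to-top() block walk, not necessarily what the patch actually does.

    // Speculative sketch, not the actual patch.
    void HeapRegion::object_iterate(ObjectClosure* blk) {
      // Whatever condition selects the resolving variant is not visible in this
      // header; 'needs_resolve' stands in for it.
      bool needs_resolve = false;
      if (needs_resolve) {
        object_iterate_impl<true>(blk);
      } else {
        object_iterate_impl<false>(blk);
      }
    }

    template<bool RESOLVE>
    void HeapRegion::object_iterate_impl(ObjectClosure* blk) {
      HeapWord* p = bottom();
      while (p < top()) {
        if (block_is_obj(p)) {
          blk->do_object(cast_to_oop(p));
        }
        // Existing callers that write block_size(p) still get block_size<false>(p)
        // thanks to the default template argument.
        p += block_size<RESOLVE>(p);
      }
    }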