
src/hotspot/share/gc/shared/genCollectedHeap.hpp


192   // "System.gc". This implies as full a collection as the CollectedHeap
193   // supports. Caller does not hold the Heap_lock on entry.
194   virtual void collect(GCCause::Cause cause);
195 
196   // The same as above, but assumes that the caller holds the Heap_lock.
197   void collect_locked(GCCause::Cause cause);
198 
199   // Perform a full collection of generations up to and including max_generation.
200   // Mostly used for testing purposes. Caller does not hold the Heap_lock on entry.
201   void collect(GCCause::Cause cause, GenerationType max_generation);
202 
203   // Returns "TRUE" iff "p" points into the committed areas of the heap.
204   // The methods is_in() and is_in_youngest() may be expensive to compute
205   // in general, so to prevent their inadvertent use in product JVMs, we
206   // restrict their use to assertion checking or verification only.
207   bool is_in(const void* p) const;
208 
209   // Returns true if the reference is to an object in the reserved space
210   // for the young generation.
211   // Assumes the young gen address range is less than that of the old gen.
212   bool is_in_young(oop p);
213 
214 #ifdef ASSERT
215   bool is_in_partial_collection(const void* p);
216 #endif
217 
218   // Optimized nmethod scanning support routines
219   virtual void register_nmethod(nmethod* nm);
220   virtual void unregister_nmethod(nmethod* nm);
221   virtual void verify_nmethod(nmethod* nm);
222   virtual void flush_nmethod(nmethod* nm);
223 
224   void prune_scavengable_nmethods();
225 
226   // Iteration functions.
227   void oop_iterate(OopIterateClosure* cl);
228   void object_iterate(ObjectClosure* cl);
229   Space* space_containing(const void* addr) const;
230 
231   // A CollectedHeap is divided into a dense sequence of "blocks"; that is,
232   // each address in the (reserved) heap is a member of exactly
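A minimal sketch of the locking convention the collect() comments describe, using std::mutex in place of HotSpot's Heap_lock and a hypothetical ToyCollectedHeap (not actual HotSpot code): the public entry point acquires the lock itself, while the _locked variant assumes the caller already holds it.

#include <mutex>

class ToyCollectedHeap {
  std::mutex _heap_lock;          // stand-in for HotSpot's Heap_lock

  // Assumes the caller holds _heap_lock, mirroring collect_locked().
  void collect_locked() {
    // ... collection work would go here ...
  }

public:
  // May be called without the lock, mirroring collect(): it takes
  // _heap_lock itself and then delegates to the locked variant.
  void collect() {
    std::lock_guard<std::mutex> guard(_heap_lock);
    collect_locked();
  }
};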

192   // "System.gc". This implies as full a collection as the CollectedHeap
193   // supports. Caller does not hold the Heap_lock on entry.
194   virtual void collect(GCCause::Cause cause);
195 
196   // The same as above, but assumes that the caller holds the Heap_lock.
197   void collect_locked(GCCause::Cause cause);
198 
199   // Perform a full collection of generations up to and including max_generation.
200   // Mostly used for testing purposes. Caller does not hold the Heap_lock on entry.
201   void collect(GCCause::Cause cause, GenerationType max_generation);
202 
203   // Returns "TRUE" iff "p" points into the committed areas of the heap.
204   // The methods is_in() and is_in_youngest() may be expensive to compute
205   // in general, so to prevent their inadvertent use in product JVMs, we
206   // restrict their use to assertion checking or verification only.
207   bool is_in(const void* p) const;
208 
209   // Returns true if the reference is to an object in the reserved space
210   // for the young generation.
211   // Assumes the young gen address range is less than that of the old gen.
212   bool is_in_young(oop p) const;
213 
214   virtual bool requires_barriers(oop obj) const { return !is_in_young(obj); }
215 
216 #ifdef ASSERT
217   bool is_in_partial_collection(const void* p);
218 #endif
219 
220   // Optimized nmethod scanning support routines
221   virtual void register_nmethod(nmethod* nm);
222   virtual void unregister_nmethod(nmethod* nm);
223   virtual void verify_nmethod(nmethod* nm);
224   virtual void flush_nmethod(nmethod* nm);
225 
226   void prune_scavengable_nmethods();
227 
228   // Iteration functions.
229   void oop_iterate(OopIterateClosure* cl);
230   void object_iterate(ObjectClosure* cl);
231   Space* space_containing(const void* addr) const;
232 
233   // A CollectedHeap is divided into a dense sequence of "blocks"; that is,
234   // each address in the (reserved) heap is a member of exactly
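The substantive change in this hunk is the new requires_barriers() default, which treats any object outside the young generation as needing GC barriers. The standalone sketch below (hypothetical ToyHeap and oop_t, not HotSpot code) illustrates the single address-range compare this relies on, valid only under the assumption stated in the comment above: the young gen's address range lies below the old gen's.

#include <cassert>
#include <cstdint>

typedef uintptr_t oop_t;          // stand-in for HotSpot's oop

class ToyHeap {
  uintptr_t _young_start;         // bottom of the young-gen reserved range
  uintptr_t _young_end;           // one past the top of that range

public:
  ToyHeap(uintptr_t start, uintptr_t end)
    : _young_start(start), _young_end(end) {}

  // One range check suffices because the young gen sits below the old gen.
  bool is_in_young(oop_t p) const {
    return p >= _young_start && p < _young_end;
  }

  // Mirrors the patch's default: young objects need no barriers,
  // everything else conservatively does.
  bool requires_barriers(oop_t obj) const {
    return !is_in_young(obj);
  }
};

int main() {
  ToyHeap heap(0x10000, 0x20000);           // hypothetical young range
  assert(!heap.requires_barriers(0x18000)); // young object: no barriers
  assert( heap.requires_barriers(0x30000)); // outside young gen: barriers
  return 0;
}

Note also that the new version makes is_in_young() const; that is what allows the const requires_barriers() to call it.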