src/share/vm/gc_interface/collectedHeap.hpp

  63  private:
  64   void log_heap(bool before);
  65 
  66  public:
  67   GCHeapLog() : EventLogBase<GCMessage>("GC Heap History") {}
  68 
  69   void log_heap_before() {
  70     log_heap(true);
  71   }
  72   void log_heap_after() {
  73     log_heap(false);
  74   }
  75 };
  76 
  77 //
  78 // CollectedHeap
  79 //   SharedHeap
  80 //     GenCollectedHeap
  81 //     G1CollectedHeap
  82 //   ParallelScavengeHeap
  83 //
  84 class CollectedHeap : public CHeapObj<mtInternal> {
  85   friend class VMStructs;
  86   friend class IsGCActiveMark; // Block structured external access to _is_gc_active
  87 
  88 #ifdef ASSERT
  89   static int       _fire_out_of_memory_count;
  90 #endif
  91 
  92   // Used for filler objects (static, but initialized in ctor).
  93   static size_t _filler_array_max_size;
  94 
  95   GCHeapLog* _gc_heap_log;
  96 
  97   // Used in support of ReduceInitialCardMarks; only consulted if COMPILER2 is being used
  98   bool _defer_initial_card_mark;
  99 
 100  protected:
 101   MemRegion _reserved;
 102   BarrierSet* _barrier_set;
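
A note on the GCHeapLog helper above: it records heap state before and after a collection via log_heap_before() and log_heap_after(). A minimal sketch of the intended bracketing pattern, assuming a possibly-NULL GCHeapLog* such as the _gc_heap_log field declared above (the surrounding function is illustrative, not part of this file):

  // Illustrative only: bracket a collection with the before/after hooks.
  void example_log_bracket(GCHeapLog* gc_heap_log) {
    if (gc_heap_log != NULL) {
      gc_heap_log->log_heap_before();  // record heap state prior to the GC
    }
    // ... perform the collection ...
    if (gc_heap_log != NULL) {
      gc_heap_log->log_heap_after();   // record heap state after the GC
    }
  }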


 171   static inline void fill_with_array(HeapWord* start, size_t words, bool zap = true);
 172 
 173   // Fill with a single object (either an int array or a java.lang.Object).
 174   static inline void fill_with_object_impl(HeapWord* start, size_t words, bool zap = true);
 175 
 176   virtual void trace_heap(GCWhen::Type when, GCTracer* tracer);
 177 
 178   // Verification functions
 179   virtual void check_for_bad_heap_word_value(HeapWord* addr, size_t size)
 180     PRODUCT_RETURN;
 181   virtual void check_for_non_bad_heap_word_value(HeapWord* addr, size_t size)
 182     PRODUCT_RETURN;
 183   debug_only(static void check_for_valid_allocation_state();)
 184 
 185  public:
 186   enum Name {
 187     Abstract,
 188     SharedHeap,
 189     GenCollectedHeap,
 190     ParallelScavengeHeap,
 191     G1CollectedHeap
 192   };
 193 
 194   static inline size_t filler_array_max_size() {
 195     return _filler_array_max_size;
 196   }
 197 
 198   virtual CollectedHeap::Name kind() const { return CollectedHeap::Abstract; }
 199 
 200   /**
 201    * Returns JNI error code JNI_ENOMEM if memory could not be allocated,
 202    * and JNI_OK on success.
 203    */
 204   virtual jint initialize() = 0;
 205 
 206   // In many heaps, there will be a need to perform some initialization activities
 207   // after the Universe is fully formed, but before general heap allocation is allowed.
 208   // This is the correct place for such initialization methods.
 209   virtual void post_initialize() = 0;
 210 
 211   // Stop any ongoing concurrent work and prepare for exit.
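
As documented above, initialize() must signal allocation failure with a JNI error code and return JNI_OK on success, after which post_initialize() runs once the Universe is fully formed. A minimal sketch of a concrete override, assuming a hypothetical ExampleHeap subclass (the reservation details are illustrative, not taken from this file):

  // Hypothetical subclass sketch: reserve the backing storage and honour the
  // JNI error-code contract documented above.
  jint ExampleHeap::initialize() {
    ReservedSpace rs(MaxHeapSize);          // illustrative sizing choice
    if (!rs.is_reserved()) {
      return JNI_ENOMEM;                    // heap memory could not be allocated
    }
    _reserved = MemRegion((HeapWord*)rs.base(),
                          (HeapWord*)(rs.base() + rs.size()));
    return JNI_OK;                          // further setup in post_initialize()
  }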


 591   // Iterator for all GC threads (other than VM thread)
 592   virtual void gc_threads_do(ThreadClosure* tc) const = 0;
 593 
 594   // Print any relevant tracing info that flags imply.
 595   // Default implementation does nothing.
 596   virtual void print_tracing_info() const = 0;
 597 
 598   void print_heap_before_gc();
 599   void print_heap_after_gc();
 600 
 601   // Registering and unregistering an nmethod (compiled code) with the heap.
 602   // Override with specific mechanism for each specialized heap type.
 603   virtual void register_nmethod(nmethod* nm);
 604   virtual void unregister_nmethod(nmethod* nm);
 605 
 606   void trace_heap_before_gc(GCTracer* gc_tracer);
 607   void trace_heap_after_gc(GCTracer* gc_tracer);
 608 
 609   // Heap verification
 610   virtual void verify(bool silent, VerifyOption option) = 0;
 611 
 612   // Non product verification and debugging.
 613 #ifndef PRODUCT
 614   // Support for PromotionFailureALot.  Return true if it's time to cause a
 615   // promotion failure.  The no-argument version uses
 616   // this->_promotion_failure_alot_count as the counter.
 617   inline bool promotion_should_fail(volatile size_t* count);
 618   inline bool promotion_should_fail();
 619 
 620   // Reset the PromotionFailureALot counters.  Should be called at the end of a
 621   // GC in which promotion failure occurred.
 622   inline void reset_promotion_should_fail(volatile size_t* count);
 623   inline void reset_promotion_should_fail();
 624 #endif  // #ifndef PRODUCT
 625 
 626 #ifdef ASSERT
 627   static int fired_fake_oom() {
 628     return (CIFireOOMAt > 1 && _fire_out_of_memory_count >= CIFireOOMAt);
 629   }
 630 #endif
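
The PromotionFailureALot hooks above let non-product builds inject artificial promotion failures; reset_promotion_should_fail() is then called at the end of a GC in which such a failure occurred. A minimal sketch of a caller on the promotion path (the function itself is hypothetical; only the CollectedHeap members come from this file):

  // Hypothetical caller: poll the hook in non-product builds only; product
  // builds compile the check away via the NOT_PRODUCT macro.
  bool should_simulate_promotion_failure(CollectedHeap* heap) {
    NOT_PRODUCT(
      if (heap->promotion_should_fail()) {
        return true;               // pretend the promotion target is full
      }
    )
    return false;                  // take the normal promotion path
  }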




  63  private:
  64   void log_heap(bool before);
  65 
  66  public:
  67   GCHeapLog() : EventLogBase<GCMessage>("GC Heap History") {}
  68 
  69   void log_heap_before() {
  70     log_heap(true);
  71   }
  72   void log_heap_after() {
  73     log_heap(false);
  74   }
  75 };
  76 
  77 //
  78 // CollectedHeap
  79 //   SharedHeap
  80 //     GenCollectedHeap
  81 //     G1CollectedHeap
  82 //   ParallelScavengeHeap
  83 //   ShenandoahHeap
  84 //
  85 class CollectedHeap : public CHeapObj<mtInternal> {
  86   friend class VMStructs;
  87   friend class IsGCActiveMark; // Block structured external access to _is_gc_active
  88 
  89 #ifdef ASSERT
  90   static int       _fire_out_of_memory_count;
  91 #endif
  92 
  93   // Used for filler objects (static, but initialized in ctor).
  94   static size_t _filler_array_max_size;
  95 
  96   GCHeapLog* _gc_heap_log;
  97 
  98   // Used in support of ReduceInitialCardMarks; only consulted if COMPILER2 is being used
  99   bool _defer_initial_card_mark;
 100 
 101  protected:
 102   MemRegion _reserved;
 103   BarrierSet* _barrier_set;


 172   static inline void fill_with_array(HeapWord* start, size_t words, bool zap = true);
 173 
 174   // Fill with a single object (either an int array or a java.lang.Object).
 175   static inline void fill_with_object_impl(HeapWord* start, size_t words, bool zap = true);
 176 
 177   virtual void trace_heap(GCWhen::Type when, GCTracer* tracer);
 178 
 179   // Verification functions
 180   virtual void check_for_bad_heap_word_value(HeapWord* addr, size_t size)
 181     PRODUCT_RETURN;
 182   virtual void check_for_non_bad_heap_word_value(HeapWord* addr, size_t size)
 183     PRODUCT_RETURN;
 184   debug_only(static void check_for_valid_allocation_state();)
 185 
 186  public:
 187   enum Name {
 188     Abstract,
 189     SharedHeap,
 190     GenCollectedHeap,
 191     ParallelScavengeHeap,
 192     G1CollectedHeap,
 193     ShenandoahHeap
 194   };
 195 
 196   static inline size_t filler_array_max_size() {
 197     return _filler_array_max_size;
 198   }
 199 
 200   virtual CollectedHeap::Name kind() const { return CollectedHeap::Abstract; }
 201 
 202   /**
 203    * Returns JNI error code JNI_ENOMEM if memory could not be allocated,
 204    * and JNI_OK on success.
 205    */
 206   virtual jint initialize() = 0;
 207 
 208   // In many heaps, there will be a need to perform some initialization activities
 209   // after the Universe is fully formed, but before general heap allocation is allowed.
 210   // This is the correct place for such initialization methods.
 211   virtual void post_initialize() = 0;
 212 
 213   // Stop any ongoing concurrent work and prepare for exit.
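
With the ShenandoahHeap enumerator added to Name above, runtime code can identify a Shenandoah heap through the virtual kind() accessor. A minimal sketch, assuming the usual Universe::heap() access point (the helper name is an illustration, not part of the patch):

  // Illustrative helper: dispatch on the new enumerator via kind().
  static bool heap_is_shenandoah() {
    return Universe::heap()->kind() == CollectedHeap::ShenandoahHeap;
  }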


 593   // Iterator for all GC threads (other than VM thread)
 594   virtual void gc_threads_do(ThreadClosure* tc) const = 0;
 595 
 596   // Print any relevant tracing info that flags imply.
 597   // Default implementation does nothing.
 598   virtual void print_tracing_info() const = 0;
 599 
 600   void print_heap_before_gc();
 601   void print_heap_after_gc();
 602 
 603   // Registering and unregistering an nmethod (compiled code) with the heap.
 604   // Override with specific mechanism for each specialized heap type.
 605   virtual void register_nmethod(nmethod* nm);
 606   virtual void unregister_nmethod(nmethod* nm);
 607 
 608   void trace_heap_before_gc(GCTracer* gc_tracer);
 609   void trace_heap_after_gc(GCTracer* gc_tracer);
 610 
 611   // Heap verification
 612   virtual void verify(bool silent, VerifyOption option) = 0;
 613 
 614   // Shut down all GC workers and other GC related threads.
 615   virtual void shutdown();
 616 
 617   // Accumulate additional statistics from GCLABs.
 618   virtual void accumulate_statistics_all_gclabs();
 619 
 620   // Support for object pinning. This is used by JNI Get*Critical()
 621   // and Release*Critical() family of functions. If supported, the GC
 622   // must guarantee that pinned objects never move.
 623   virtual bool supports_object_pinning() const;
 624   virtual oop pin_object(JavaThread* thread, oop obj);
 625   virtual void unpin_object(JavaThread* thread, oop obj);
 626 
 627   // Non product verification and debugging.
 628 #ifndef PRODUCT
 629   // Support for PromotionFailureALot.  Return true if it's time to cause a
 630   // promotion failure.  The no-argument version uses
 631   // this->_promotion_failure_alot_count as the counter.
 632   inline bool promotion_should_fail(volatile size_t* count);
 633   inline bool promotion_should_fail();
 634 
 635   // Reset the PromotionFailureALot counters.  Should be called at the end of a
 636   // GC in which promotion failure occurred.
 637   inline void reset_promotion_should_fail(volatile size_t* count);
 638   inline void reset_promotion_should_fail();
 639 #endif  // #ifndef PRODUCT
 640 
 641 #ifdef ASSERT
 642   static int fired_fake_oom() {
 643     return (CIFireOOMAt > 1 && _fire_out_of_memory_count >= CIFireOOMAt);
 644   }
 645 #endif
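
The object-pinning interface added above is meant for the JNI Get*Critical()/Release*Critical() path: a collector that supports pinning promises not to move a pinned object, so a raw pointer into it can be handed to native code without blocking GC. A minimal usage sketch under that assumption (the surrounding function and variable names are illustrative, not part of the patch):

  // Illustrative only: pin around a critical section on a pinning-capable heap.
  void example_critical_access(JavaThread* thread, oop obj) {
    CollectedHeap* heap = Universe::heap();
    if (heap->supports_object_pinning()) {
      obj = heap->pin_object(thread, obj);  // GC must not relocate obj from here
      // ... expose a direct pointer into obj to native code ...
      heap->unpin_object(thread, obj);      // relocation permitted again
    }
  }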

