src/share/vm/memory/universe.cpp (old)

  63 #include "runtime/init.hpp"
  64 #include "runtime/java.hpp"
  65 #include "runtime/javaCalls.hpp"
  66 #include "runtime/sharedRuntime.hpp"
  67 #include "runtime/synchronizer.hpp"
  68 #include "runtime/thread.inline.hpp"
  69 #include "runtime/timer.hpp"
  70 #include "runtime/vm_operations.hpp"
  71 #include "services/memoryService.hpp"
  72 #include "utilities/copy.hpp"
  73 #include "utilities/events.hpp"
  74 #include "utilities/hashtable.inline.hpp"
  75 #include "utilities/preserveException.hpp"
  76 #include "utilities/macros.hpp"
  77 #if INCLUDE_ALL_GCS
  78 #include "gc_implementation/concurrentMarkSweep/cmsAdaptiveSizePolicy.hpp"
  79 #include "gc_implementation/concurrentMarkSweep/cmsCollectorPolicy.hpp"
  80 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
  81 #include "gc_implementation/g1/g1CollectorPolicy_ext.hpp"
  82 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
  83 #endif // INCLUDE_ALL_GCS
  84 
  85 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  86 
  87 // Known objects
  88 Klass* Universe::_boolArrayKlassObj                 = NULL;
  89 Klass* Universe::_byteArrayKlassObj                 = NULL;
  90 Klass* Universe::_charArrayKlassObj                 = NULL;
  91 Klass* Universe::_intArrayKlassObj                  = NULL;
  92 Klass* Universe::_shortArrayKlassObj                = NULL;
  93 Klass* Universe::_longArrayKlassObj                 = NULL;
  94 Klass* Universe::_singleArrayKlassObj               = NULL;
  95 Klass* Universe::_doubleArrayKlassObj               = NULL;
  96 Klass* Universe::_typeArrayKlassObjs[T_VOID+1]      = { NULL /*, NULL...*/ };
  97 Klass* Universe::_objectArrayKlassObj               = NULL;
  98 oop Universe::_int_mirror                             = NULL;
  99 oop Universe::_float_mirror                           = NULL;
 100 oop Universe::_double_mirror                          = NULL;
 101 oop Universe::_byte_mirror                            = NULL;
 102 oop Universe::_bool_mirror                            = NULL;


 557 }
 558 
 559 
 560 void Universe::reinitialize_itables(TRAPS) {
 561   SystemDictionary::classes_do(initialize_itable_for_klass, CHECK);
 562 
 563 }
 564 
 565 
 566 bool Universe::on_page_boundary(void* addr) {
 567   return ((uintptr_t) addr) % os::vm_page_size() == 0;
 568 }
 569 
 570 
 571 bool Universe::should_fill_in_stack_trace(Handle throwable) {
 572   // never attempt to fill in the stack trace of preallocated errors that do not have
 573   // backtrace. These errors are kept alive forever and may be "re-used" when all
 574   // preallocated errors with backtrace have been consumed. Also need to avoid
 575   // a potential loop which could happen if an out of memory occurs when attempting
 576   // to allocate the backtrace.
 577   return ((throwable() != Universe::_out_of_memory_error_java_heap) &&
 578           (throwable() != Universe::_out_of_memory_error_metaspace)  &&
 579           (throwable() != Universe::_out_of_memory_error_class_metaspace)  &&
 580           (throwable() != Universe::_out_of_memory_error_array_size) &&
 581           (throwable() != Universe::_out_of_memory_error_gc_overhead_limit) &&
 582           (throwable() != Universe::_out_of_memory_error_realloc_objects));
 583 }
 584 
 585 
 586 oop Universe::gen_out_of_memory_error(oop default_err) {
 587   // generate an out of memory error:
 588   // - if there is a preallocated error with backtrace available then return it with
 589   //   a filled in stack trace.
 590   // - if there are no preallocated errors with backtrace available then return
 591   //   an error without backtrace.
 592   int next;
 593   if (_preallocated_out_of_memory_error_avail_count > 0) {
 594     next = (int)Atomic::add(-1, &_preallocated_out_of_memory_error_avail_count);
 595     assert(next < (int)PreallocatedOutOfMemoryErrorCount, "avail count is corrupt");
 596   } else {
 597     next = -1;
 598   }
 599   if (next < 0) {
 600     // all preallocated errors have been used.
 601     // return default
 602     return default_err;


 796   return (char*)base; // also return NULL (don't care) for 32-bit VM
 797 }
 798 
 799 jint Universe::initialize_heap() {
 800 
 801   if (UseParallelGC) {
 802 #if INCLUDE_ALL_GCS
 803     Universe::_collectedHeap = new ParallelScavengeHeap();
 804 #else  // INCLUDE_ALL_GCS
 805     fatal("UseParallelGC not supported in this VM.");
 806 #endif // INCLUDE_ALL_GCS
 807 
 808   } else if (UseG1GC) {
 809 #if INCLUDE_ALL_GCS
 810     G1CollectorPolicyExt* g1p = new G1CollectorPolicyExt();
 811     g1p->initialize_all();
 812     G1CollectedHeap* g1h = new G1CollectedHeap(g1p);
 813     Universe::_collectedHeap = g1h;
 814 #else  // INCLUDE_ALL_GCS
 815     fatal("UseG1GC not supported in java kernel vm.");
 816 #endif // INCLUDE_ALL_GCS
 817 
 818   } else {
 819     GenCollectorPolicy *gc_policy;
 820 
 821     if (UseSerialGC) {
 822       gc_policy = new MarkSweepPolicy();
 823     } else if (UseConcMarkSweepGC) {
 824 #if INCLUDE_ALL_GCS
 825       if (UseAdaptiveSizePolicy) {
 826         gc_policy = new ASConcurrentMarkSweepPolicy();
 827       } else {
 828         gc_policy = new ConcurrentMarkSweepPolicy();
 829       }
 830 #else  // INCLUDE_ALL_GCS
 831     fatal("UseConcMarkSweepGC not supported in this VM.");
 832 #endif // INCLUDE_ALL_GCS
 833     } else { // default old generation
 834       gc_policy = new MarkSweepPolicy();
 835     }

src/share/vm/memory/universe.cpp (new)

  63 #include "runtime/init.hpp"
  64 #include "runtime/java.hpp"
  65 #include "runtime/javaCalls.hpp"
  66 #include "runtime/sharedRuntime.hpp"
  67 #include "runtime/synchronizer.hpp"
  68 #include "runtime/thread.inline.hpp"
  69 #include "runtime/timer.hpp"
  70 #include "runtime/vm_operations.hpp"
  71 #include "services/memoryService.hpp"
  72 #include "utilities/copy.hpp"
  73 #include "utilities/events.hpp"
  74 #include "utilities/hashtable.inline.hpp"
  75 #include "utilities/preserveException.hpp"
  76 #include "utilities/macros.hpp"
  77 #if INCLUDE_ALL_GCS
  78 #include "gc_implementation/concurrentMarkSweep/cmsAdaptiveSizePolicy.hpp"
  79 #include "gc_implementation/concurrentMarkSweep/cmsCollectorPolicy.hpp"
  80 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
  81 #include "gc_implementation/g1/g1CollectorPolicy_ext.hpp"
  82 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
  83 #include "gc_implementation/shenandoah/shenandoahHeap.hpp"
  84 #include "gc_implementation/shenandoah/shenandoahCollectorPolicy.hpp"
  85 #endif // INCLUDE_ALL_GCS
  86 
  87 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  88 
  89 // Known objects
  90 Klass* Universe::_boolArrayKlassObj                 = NULL;
  91 Klass* Universe::_byteArrayKlassObj                 = NULL;
  92 Klass* Universe::_charArrayKlassObj                 = NULL;
  93 Klass* Universe::_intArrayKlassObj                  = NULL;
  94 Klass* Universe::_shortArrayKlassObj                = NULL;
  95 Klass* Universe::_longArrayKlassObj                 = NULL;
  96 Klass* Universe::_singleArrayKlassObj               = NULL;
  97 Klass* Universe::_doubleArrayKlassObj               = NULL;
  98 Klass* Universe::_typeArrayKlassObjs[T_VOID+1]      = { NULL /*, NULL...*/ };
  99 Klass* Universe::_objectArrayKlassObj               = NULL;
 100 oop Universe::_int_mirror                             = NULL;
 101 oop Universe::_float_mirror                           = NULL;
 102 oop Universe::_double_mirror                          = NULL;
 103 oop Universe::_byte_mirror                            = NULL;
 104 oop Universe::_bool_mirror                            = NULL;


 559 }
 560 
 561 
 562 void Universe::reinitialize_itables(TRAPS) {
 563   SystemDictionary::classes_do(initialize_itable_for_klass, CHECK);
 564 
 565 }
 566 
 567 
 568 bool Universe::on_page_boundary(void* addr) {
 569   return ((uintptr_t) addr) % os::vm_page_size() == 0;
 570 }
 571 
 572 
 573 bool Universe::should_fill_in_stack_trace(Handle throwable) {
 574   // never attempt to fill in the stack trace of preallocated errors that do not have
 575   // backtrace. These errors are kept alive forever and may be "re-used" when all
 576   // preallocated errors with backtrace have been consumed. Also need to avoid
 577   // a potential loop which could happen if an out of memory occurs when attempting
 578   // to allocate the backtrace.
 579   return ((!oopDesc::equals(throwable(), Universe::_out_of_memory_error_java_heap)) &&
 580           (!oopDesc::equals(throwable(), Universe::_out_of_memory_error_metaspace))  &&
 581           (!oopDesc::equals(throwable(), Universe::_out_of_memory_error_class_metaspace))  &&
 582           (!oopDesc::equals(throwable(), Universe::_out_of_memory_error_array_size)) &&
 583           (!oopDesc::equals(throwable(), Universe::_out_of_memory_error_gc_overhead_limit)) &&
 584           (!oopDesc::equals(throwable(), Universe::_out_of_memory_error_realloc_objects)));
 585 }
 586 
 587 
 588 oop Universe::gen_out_of_memory_error(oop default_err) {
 589   // generate an out of memory error:
 590   // - if there is a preallocated error with backtrace available then return it with
 591   //   a filled in stack trace.
 592   // - if there are no preallocated errors with backtrace available then return
 593   //   an error without backtrace.
 594   int next;
 595   if (_preallocated_out_of_memory_error_avail_count > 0) {
 596     next = (int)Atomic::add(-1, &_preallocated_out_of_memory_error_avail_count);
 597     assert(next < (int)PreallocatedOutOfMemoryErrorCount, "avail count is corrupt");
 598   } else {
 599     next = -1;
 600   }
 601   if (next < 0) {
 602     // all preallocated errors have been used.
 603     // return default
 604     return default_err;
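
The raw pointer comparisons in should_fill_in_stack_trace above are replaced with oopDesc::equals because Shenandoah reaches objects through forwarding (Brooks) pointers: while evacuation is in progress the same object can be visible at both its from-space and its to-space address, and comparing raw oop values can then report a false mismatch. The snippet below is only a toy model of that idea, not HotSpot code; Obj, resolve() and oop_equals() are made-up stand-ins for the real oop type and barrier machinery.

#include <cstdio>

// Toy model, not HotSpot code: each object carries an explicit forwarding
// pointer (Brooks-pointer style).  Before evacuation it points at the object
// itself; after evacuation it points at the to-space copy.
struct Obj {
  Obj* fwd;
  int  payload;
};

// Resolve a reference through its forwarding pointer before comparing.
static Obj* resolve(Obj* o) { return o == nullptr ? nullptr : o->fwd; }

// Role played by oopDesc::equals in the diff: two references are equal
// iff they resolve to the same copy.
static bool oop_equals(Obj* a, Obj* b) { return resolve(a) == resolve(b); }

int main() {
  Obj to_space   = { &to_space, 42 };   // evacuated copy, forwards to itself
  Obj from_space = { &to_space, 42 };   // stale copy, forwards to to-space

  Obj* ref1 = &from_space;              // reference not yet updated by the GC
  Obj* ref2 = &to_space;                // reference already updated

  printf("raw compare: %d\n", ref1 == ref2);           // 0: false mismatch
  printf("oop_equals:  %d\n", oop_equals(ref1, ref2)); // 1: same object
  return 0;
}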

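gen_out_of_memory_error above claims one of a fixed number of preallocated OOM errors by atomically decrementing an availability counter (Atomic::add(-1, ...) returns the post-decrement value) and falls back to the default, backtrace-less error once the pool is exhausted. Below is a small standard-C++ sketch of that claim-or-fall-back pattern, not HotSpot code; unlike the code above it decrements unconditionally, so the counter can drift below -1, which does not change which slot a caller gets.

#include <atomic>
#include <cstdio>

// Sketch of the claim pattern, not HotSpot code: a fixed pool of slots is
// handed out by atomically decrementing an availability counter; once the
// counter goes negative the pool is exhausted and callers get the default.
static const int   kPoolSize = 4;
static const char* pool[kPoolSize] = { "err0", "err1", "err2", "err3" };
static std::atomic<int> avail(kPoolSize);

static const char* claim_or_default(const char* default_err) {
  // fetch_sub returns the previous value, so "previous - 1" matches what
  // Atomic::add(-1, &count) returns in the code above.
  int next = avail.fetch_sub(1) - 1;
  if (next < 0) {
    return default_err;        // all preallocated slots already used
  }
  return pool[next];           // hand out the next unused slot
}

int main() {
  for (int i = 0; i < 6; i++) {
    printf("%s\n", claim_or_default("default"));  // err3..err0, then defaults
  }
  return 0;
}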

 798   return (char*)base; // also return NULL (don't care) for 32-bit VM
 799 }
 800 
 801 jint Universe::initialize_heap() {
 802 
 803   if (UseParallelGC) {
 804 #if INCLUDE_ALL_GCS
 805     Universe::_collectedHeap = new ParallelScavengeHeap();
 806 #else  // INCLUDE_ALL_GCS
 807     fatal("UseParallelGC not supported in this VM.");
 808 #endif // INCLUDE_ALL_GCS
 809 
 810   } else if (UseG1GC) {
 811 #if INCLUDE_ALL_GCS
 812     G1CollectorPolicyExt* g1p = new G1CollectorPolicyExt();
 813     g1p->initialize_all();
 814     G1CollectedHeap* g1h = new G1CollectedHeap(g1p);
 815     Universe::_collectedHeap = g1h;
 816 #else  // INCLUDE_ALL_GCS
 817     fatal("UseG1GC not supported in java kernel vm.");
 818 #endif // INCLUDE_ALL_GCS
 819 
 820   } else if (UseShenandoahGC) {
 821 #if INCLUDE_ALL_GCS
 822     ShenandoahCollectorPolicy* shcp = new ShenandoahCollectorPolicy();
 823     ShenandoahHeap* sh = new ShenandoahHeap(shcp);
 824     Universe::_collectedHeap = sh;
 825 #else  // INCLUDE_ALL_GCS
 826     fatal("UseShenandoahGC not supported in java kernel vm.");
 827 #endif // INCLUDE_ALL_GCS
 828 
 829   } else {
 830     GenCollectorPolicy *gc_policy;
 831 
 832     if (UseSerialGC) {
 833       gc_policy = new MarkSweepPolicy();
 834     } else if (UseConcMarkSweepGC) {
 835 #if INCLUDE_ALL_GCS
 836       if (UseAdaptiveSizePolicy) {
 837         gc_policy = new ASConcurrentMarkSweepPolicy();
 838       } else {
 839         gc_policy = new ConcurrentMarkSweepPolicy();
 840       }
 841 #else  // INCLUDE_ALL_GCS
 842     fatal("UseConcMarkSweepGC not supported in this VM.");
 843 #endif // INCLUDE_ALL_GCS
 844     } else { // default old generation
 845       gc_policy = new MarkSweepPolicy();
 846     }

