
src/hotspot/share/gc/shared/genCollectedHeap.cpp

Old version:

 773     ScavengableNMethods::nmethods_do(code_roots);
 774   }
 775   if (so & SO_AllCodeCache) {
 776     assert(code_roots != NULL, "must supply closure for code cache");
 777 
 778     // CMSCollector uses this to do intermediate-strength collections.
 779     // We scan the entire code cache, since CodeCache::do_unloading is not called.
 780     CodeCache::blobs_do(code_roots);
 781   }
 782   // Verify that the code cache contents are not subject to
 783   // movement by a scavenging collection.
 784   DEBUG_ONLY(CodeBlobToOopClosure assert_code_is_non_scavengable(&assert_is_non_scavengable_closure, !CodeBlobToOopClosure::FixRelocations));
 785   DEBUG_ONLY(ScavengableNMethods::asserted_non_scavengable_nmethods_do(&assert_code_is_non_scavengable));
 786 }
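
As a stand-alone illustration of the bitmask dispatch in the code-cache section above, here is a minimal, compilable sketch: ScanningOption bits select which portion of the code cache a closure visits. All names here (process_code_roots, CodeRootClosure) are illustrative stand-ins, not HotSpot APIs.

    // Minimal stand-alone sketch (not HotSpot code): how a bitmask of
    // ScanningOption values selects which code-cache roots are visited,
    // mirroring the `so & SO_AllCodeCache` tests above.
    #include <cassert>
    #include <cstdio>

    enum ScanningOption {
      SO_None              = 0x0,
      SO_ScavengeCodeCache = 0x1,  // only nmethods that may point into young gen
      SO_AllCodeCache      = 0x2   // every blob, e.g. when unloading is skipped
    };

    struct CodeRootClosure {                  // stand-in for a CodeBlobClosure
      void do_code_blob(const char* which) { std::printf("visit: %s\n", which); }
    };

    void process_code_roots(int so, CodeRootClosure* code_roots) {
      if (so & SO_ScavengeCodeCache) {
        assert(code_roots != nullptr && "must supply closure for code cache");
        code_roots->do_code_blob("scavengable nmethods");
      }
      if (so & SO_AllCodeCache) {
        assert(code_roots != nullptr && "must supply closure for code cache");
        code_roots->do_code_blob("entire code cache");
      }
    }

    int main() {
      CodeRootClosure cl;
      process_code_roots(SO_AllCodeCache, &cl);
      return 0;
    }
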
 787 
 788 void GenCollectedHeap::full_process_roots(bool is_adjust_phase,
 789                                           ScanningOption so,
 790                                           bool only_strong_roots,
 791                                           OopClosure* root_closure,
 792                                           CLDClosure* cld_closure) {
 793   MarkingCodeBlobClosure mark_code_closure(root_closure, is_adjust_phase);
 794   CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
 795 
 796   process_roots(so, root_closure, cld_closure, weak_cld_closure, &mark_code_closure);
 797 }
 798 
 799 void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
 800   WeakProcessor::oops_do(root_closure);
 801   _young_gen->ref_processor()->weak_oops_do(root_closure);
 802   _old_gen->ref_processor()->weak_oops_do(root_closure);
 803 }
 804 
 805 bool GenCollectedHeap::no_allocs_since_save_marks() {
 806   return _young_gen->no_allocs_since_save_marks() &&
 807          _old_gen->no_allocs_since_save_marks();
 808 }
 809 
 810 bool GenCollectedHeap::supports_inline_contig_alloc() const {
 811   return _young_gen->supports_inline_contig_alloc();
 812 }
 813 

 883                                           GenerationType last_generation) {
 884   do_collection(true,                   // full
 885                 clear_all_soft_refs,    // clear_all_soft_refs
 886                 0,                      // size
 887                 false,                  // is_tlab
 888                 last_generation);       // last_generation
 889   // Hack XXX FIX ME !!!
 890   // A scavenge may not have been attempted, or may have
 891   // been attempted and failed, because the old gen was too full
 892   if (gc_cause() == GCCause::_gc_locker && incremental_collection_failed()) {
 893     log_debug(gc, jni)("GC locker: Trying a full collection because scavenge failed");
 894     // This time allow the old gen to be collected as well
 895     do_collection(true,                // full
 896                   clear_all_soft_refs, // clear_all_soft_refs
 897                   0,                   // size
 898                   false,               // is_tlab
 899                   OldGen);             // last_generation
 900   }
 901 }
 902 
 903 bool GenCollectedHeap::is_in_young(oop p) {
 904   bool result = cast_from_oop<HeapWord*>(p) < _old_gen->reserved().start();
 905   assert(result == _young_gen->is_in_reserved(p),
 906          "incorrect test - result=%d, p=" INTPTR_FORMAT, result, p2i((void*)p));
 907   return result;
 908 }
 909 
 910 // Returns "TRUE" iff "p" points into the committed areas of the heap.
 911 bool GenCollectedHeap::is_in(const void* p) const {
 912   return _young_gen->is_in(p) || _old_gen->is_in(p);
 913 }
 914 
 915 #ifdef ASSERT
 916 // Don't implement this by using is_in_young().  This method is used
 917 // in some cases to check that is_in_young() is correct.
 918 bool GenCollectedHeap::is_in_partial_collection(const void* p) {
 919   assert(is_in_reserved(p) || p == NULL,
 920     "Does not work if address is non-null and outside of the heap");
 921   return p < _young_gen->reserved().end() && p != NULL;
 922 }
 923 #endif
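
The single-comparison test in is_in_young() above relies on the young generation being reserved at lower addresses than the old generation; the assert cross-checks it against the full range test. A minimal sketch of that invariant follows; the types (Reserved) and free function are illustrative stand-ins, not the HotSpot declarations.

    // Minimal stand-alone sketch (not HotSpot code) of the invariant behind
    // is_in_young(): the young generation is reserved at lower addresses than
    // the old one, so one pointer comparison replaces a full range check.
    #include <cassert>

    struct Reserved { const char* start; const char* end; };  // [start, end)

    bool is_in_young(const void* p, const Reserved& young, const Reserved& old_gen) {
      const char* addr = static_cast<const char*>(p);
      bool result = addr < old_gen.start;                // single comparison
      // Cross-check against the explicit range test, as the assert above does.
      assert(result == (addr >= young.start && addr < young.end));
      return result;
    }

    int main() {
      char heap[1000];
      Reserved young  {heap,       heap + 400};
      Reserved old_gen{heap + 400, heap + 1000};
      assert( is_in_young(heap + 10,  young, old_gen));
      assert(!is_in_young(heap + 500, young, old_gen));
      return 0;
    }
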

1161 
1162 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1163  private:
1164   bool _full;
1165  public:
1166   void do_generation(Generation* gen) {
1167     gen->gc_prologue(_full);
1168   }
1169   GenGCPrologueClosure(bool full) : _full(full) {};
1170 };
1171 
1172 void GenCollectedHeap::gc_prologue(bool full) {
1173   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1174 
1175   // Fill TLAB's and such
1176   ensure_parsability(true);   // retire TLABs
1177 
1178   // Walk generations
1179   GenGCPrologueClosure blk(full);
1180   generation_iterate(&blk, false);  // not old-to-young.
1181 };
1182 
1183 class GenGCEpilogueClosure: public GenCollectedHeap::GenClosure {
1184  private:
1185   bool _full;
1186  public:
1187   void do_generation(Generation* gen) {
1188     gen->gc_epilogue(_full);
1189   }
1190   GenGCEpilogueClosure(bool full) : _full(full) {};
1191 };
1192 
1193 void GenCollectedHeap::gc_epilogue(bool full) {
1194 #if COMPILER2_OR_JVMCI
1195   assert(DerivedPointerTable::is_empty(), "derived pointer present");
1196   size_t actual_gap = pointer_delta((HeapWord*) (max_uintx-3), *(end_addr()));
1197   guarantee(!CompilerConfig::is_c2_or_jvmci_compiler_enabled() || actual_gap > (size_t)FastAllocateSizeLimit, "inline allocation wraps");
1198 #endif // COMPILER2_OR_JVMCI
1199 
1200   resize_all_tlabs();
1201 
1202   GenGCEpilogueClosure blk(full);
1203   generation_iterate(&blk, false);  // not old-to-young.
1204 
1205   MetaspaceCounters::update_performance_counters();
1206 };
1207 
1208 #ifndef PRODUCT
1209 class GenGCSaveTopsBeforeGCClosure: public GenCollectedHeap::GenClosure {
1210  private:
1211  public:
1212   void do_generation(Generation* gen) {
1213     gen->record_spaces_top();
1214   }
1215 };
1216 
1217 void GenCollectedHeap::record_gen_tops_before_GC() {
1218   if (ZapUnusedHeapArea) {
1219     GenGCSaveTopsBeforeGCClosure blk;
1220     generation_iterate(&blk, false);
1221   }
1222 }
1223 #endif  // not PRODUCT
1224 
1225 class GenEnsureParsabilityClosure: public GenCollectedHeap::GenClosure {
1226  public:
1227   void do_generation(Generation* gen) {
1228     gen->ensure_parsability();
1229   }
1230 };
1231 
1232 void GenCollectedHeap::ensure_parsability(bool retire_tlabs) {
1233   CollectedHeap::ensure_parsability(retire_tlabs);
1234   GenEnsureParsabilityClosure ep_cl;
1235   generation_iterate(&ep_cl, false);
1236 }
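
The prologue, epilogue, save-tops, and parsability closures above all follow the same visitor pattern: a GenClosure subclass is handed to generation_iterate(), which applies it to each generation in the requested order. A self-contained sketch of the pattern, with simplified stand-in types (the real generation_iterate is a GenCollectedHeap member, not a free function):

    // Minimal stand-alone sketch (not HotSpot code) of the GenClosure visitor
    // pattern used by the prologue/epilogue/parsability closures above:
    // generation_iterate() applies one closure object to every generation.
    #include <cstdio>

    struct Generation {
      const char* name;
      void gc_prologue(bool full) { std::printf("%s prologue, full=%d\n", name, full); }
    };

    struct GenClosure {
      virtual ~GenClosure() = default;
      virtual void do_generation(Generation* gen) = 0;
    };

    struct GenGCPrologueClosure : GenClosure {
      bool _full;
      explicit GenGCPrologueClosure(bool full) : _full(full) {}
      void do_generation(Generation* gen) override { gen->gc_prologue(_full); }
    };

    // Stand-in for GenCollectedHeap::generation_iterate(cl, old_to_young).
    void generation_iterate(GenClosure* cl, bool old_to_young,
                            Generation* young, Generation* old_gen) {
      if (old_to_young) { cl->do_generation(old_gen); cl->do_generation(young); }
      else              { cl->do_generation(young);   cl->do_generation(old_gen); }
    }

    int main() {
      Generation young{"young"}, old_gen{"old"};
      GenGCPrologueClosure blk(true);
      generation_iterate(&blk, false, &young, &old_gen);  // not old-to-young
      return 0;
    }
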
1237 
1238 oop GenCollectedHeap::handle_failed_promotion(Generation* old_gen,
1239                                               oop obj,
1240                                               size_t obj_size) {
1241   guarantee(old_gen == _old_gen, "We only get here with an old generation");
1242   assert(obj_size == (size_t)obj->size(), "bad obj_size passed in");
1243   HeapWord* result = NULL;
1244 
1245   result = old_gen->expand_and_allocate(obj_size, false);
1246 
1247   if (result != NULL) {
1248     Copy::aligned_disjoint_words(cast_from_oop<HeapWord*>(obj), result, obj_size);
1249   }
1250   return cast_to_oop(result);
1251 }
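
handle_failed_promotion() is the last-ditch fallback when copying an object into the old generation fails: try to expand the old generation, and only if that yields space copy the object; a null result means promotion still failed. A compilable sketch of the same control flow, using stand-in types (OldGen and this expand_and_allocate are illustrative, not the HotSpot signatures):

    // Minimal stand-alone sketch (not HotSpot code) of the failed-promotion
    // fallback above: expand the old generation, and only copy the object if
    // that produced space; a null result means promotion failed.
    #include <cstring>
    #include <cstdint>

    struct OldGen {
      uint8_t buf[4096];
      size_t  used = 0;
      // Stand-in for expand_and_allocate(): "expands" until the buffer is full.
      void* expand_and_allocate(size_t bytes) {
        if (used + bytes > sizeof(buf)) return nullptr;  // expansion failed
        void* p = buf + used;
        used += bytes;
        return p;
      }
    };

    void* handle_failed_promotion(OldGen* old_gen, const void* obj, size_t size) {
      void* result = old_gen->expand_and_allocate(size);
      if (result != nullptr) {
        std::memcpy(result, obj, size);  // source and destination are disjoint
      }
      return result;
    }

    int main() {
      OldGen og;
      uint32_t obj[4] = {1, 2, 3, 4};
      void* p = handle_failed_promotion(&og, obj, sizeof(obj));
      return p != nullptr ? 0 : 1;
    }
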

New version:

 773     ScavengableNMethods::nmethods_do(code_roots);
 774   }
 775   if (so & SO_AllCodeCache) {
 776     assert(code_roots != NULL, "must supply closure for code cache");
 777 
 778     // CMSCollector uses this to do intermediate-strength collections.
 779     // We scan the entire code cache, since CodeCache::do_unloading is not called.
 780     CodeCache::blobs_do(code_roots);
 781   }
 782   // Verify that the code cache contents are not subject to
 783   // movement by a scavenging collection.
 784   DEBUG_ONLY(CodeBlobToOopClosure assert_code_is_non_scavengable(&assert_is_non_scavengable_closure, !CodeBlobToOopClosure::FixRelocations));
 785   DEBUG_ONLY(ScavengableNMethods::asserted_non_scavengable_nmethods_do(&assert_code_is_non_scavengable));
 786 }
 787 
 788 void GenCollectedHeap::full_process_roots(bool is_adjust_phase,
 789                                           ScanningOption so,
 790                                           bool only_strong_roots,
 791                                           OopClosure* root_closure,
 792                                           CLDClosure* cld_closure) {
 793   MarkingCodeBlobClosure mark_code_closure(root_closure, CodeBlobToOopClosure::FixRelocations, !is_adjust_phase);
 794   CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
 795 
 796   process_roots(so, root_closure, cld_closure, weak_cld_closure, &mark_code_closure);
 797 }
 798 
 799 void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
 800   WeakProcessor::oops_do(root_closure);
 801   _young_gen->ref_processor()->weak_oops_do(root_closure);
 802   _old_gen->ref_processor()->weak_oops_do(root_closure);
 803 }
 804 
 805 bool GenCollectedHeap::no_allocs_since_save_marks() {
 806   return _young_gen->no_allocs_since_save_marks() &&
 807          _old_gen->no_allocs_since_save_marks();
 808 }
 809 
 810 bool GenCollectedHeap::supports_inline_contig_alloc() const {
 811   return _young_gen->supports_inline_contig_alloc();
 812 }
 813 

 883                                           GenerationType last_generation) {
 884   do_collection(true,                   // full
 885                 clear_all_soft_refs,    // clear_all_soft_refs
 886                 0,                      // size
 887                 false,                  // is_tlab
 888                 last_generation);       // last_generation
 889   // Hack XXX FIX ME !!!
 890   // A scavenge may not have been attempted, or may have
 891   // been attempted and failed, because the old gen was too full
 892   if (gc_cause() == GCCause::_gc_locker && incremental_collection_failed()) {
 893     log_debug(gc, jni)("GC locker: Trying a full collection because scavenge failed");
 894     // This time allow the old gen to be collected as well
 895     do_collection(true,                // full
 896                   clear_all_soft_refs, // clear_all_soft_refs
 897                   0,                   // size
 898                   false,               // is_tlab
 899                   OldGen);             // last_generation
 900   }
 901 }
 902 
 903 bool GenCollectedHeap::is_in_young(oop p) const {
 904   bool result = cast_from_oop<HeapWord*>(p) < _old_gen->reserved().start();
 905   assert(result == _young_gen->is_in_reserved(p),
 906          "incorrect test - result=%d, p=" INTPTR_FORMAT, result, p2i((void*)p));
 907   return result;
 908 }
 909 
 910 // Returns "TRUE" iff "p" points into the committed areas of the heap.
 911 bool GenCollectedHeap::is_in(const void* p) const {
 912   return _young_gen->is_in(p) || _old_gen->is_in(p);
 913 }
 914 
 915 #ifdef ASSERT
 916 // Don't implement this by using is_in_young().  This method is used
 917 // in some cases to check that is_in_young() is correct.
 918 bool GenCollectedHeap::is_in_partial_collection(const void* p) {
 919   assert(is_in_reserved(p) || p == NULL,
 920     "Does not work if address is non-null and outside of the heap");
 921   return p < _young_gen->reserved().end() && p != NULL;
 922 }
 923 #endif

1161 
1162 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1163  private:
1164   bool _full;
1165  public:
1166   void do_generation(Generation* gen) {
1167     gen->gc_prologue(_full);
1168   }
1169   GenGCPrologueClosure(bool full) : _full(full) {};
1170 };
1171 
1172 void GenCollectedHeap::gc_prologue(bool full) {
1173   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1174 
1175   // Fill TLAB's and such
1176   ensure_parsability(true);   // retire TLABs
1177 
1178   // Walk generations
1179   GenGCPrologueClosure blk(full);
1180   generation_iterate(&blk, false);  // not old-to-young.
1181 
1182   if (full) {
1183     CodeCache::increment_marking_cycle();
1184   }
1185 };
1186 
1187 class GenGCEpilogueClosure: public GenCollectedHeap::GenClosure {
1188  private:
1189   bool _full;
1190  public:
1191   void do_generation(Generation* gen) {
1192     gen->gc_epilogue(_full);
1193   }
1194   GenGCEpilogueClosure(bool full) : _full(full) {};
1195 };
1196 
1197 void GenCollectedHeap::gc_epilogue(bool full) {
1198 #if COMPILER2_OR_JVMCI
1199   assert(DerivedPointerTable::is_empty(), "derived pointer present");
1200   size_t actual_gap = pointer_delta((HeapWord*) (max_uintx-3), *(end_addr()));
1201   guarantee(!CompilerConfig::is_c2_or_jvmci_compiler_enabled() || actual_gap > (size_t)FastAllocateSizeLimit, "inline allocation wraps");
1202 #endif // COMPILER2_OR_JVMCI
1203 
1204   if (full) {
1205     CodeCache::increment_marking_cycle();
1206   }
1207 
1208   resize_all_tlabs();
1209 
1210   GenGCEpilogueClosure blk(full);
1211   generation_iterate(&blk, false);  // not old-to-young.
1212 
1213   MetaspaceCounters::update_performance_counters();
1214 };
1215 
1216 #ifndef PRODUCT
1217 class GenGCSaveTopsBeforeGCClosure: public GenCollectedHeap::GenClosure {
1218  private:
1219  public:
1220   void do_generation(Generation* gen) {
1221     gen->record_spaces_top();
1222   }
1223 };
1224 
1225 void GenCollectedHeap::record_gen_tops_before_GC() {
1226   if (ZapUnusedHeapArea) {
1227     GenGCSaveTopsBeforeGCClosure blk;
1228     generation_iterate(&blk, false);
1229   }
1230 }
1231 #endif  // not PRODUCT
1232 
1233 class GenEnsureParsabilityClosure: public GenCollectedHeap::GenClosure {
1234  public:
1235   void do_generation(Generation* gen) {
1236     gen->ensure_parsability();
1237   }
1238 };
1239 
1240 void GenCollectedHeap::ensure_parsability(bool retire_tlabs) {
1241   CollectedHeap::ensure_parsability(retire_tlabs);
1242   GenEnsureParsabilityClosure ep_cl;
1243   generation_iterate(&ep_cl, false);
1244 }
1245 
1246 oop GenCollectedHeap::handle_failed_promotion(Generation* old_gen,
1247                                               oop obj,
1248                                               size_t obj_size) {
1249   guarantee(old_gen == _old_gen, "We only get here with an old generation");
1250   assert(obj_size == (size_t)obj->compact_size(), "bad obj_size passed in");
1251   HeapWord* result = NULL;
1252 
1253   result = old_gen->expand_and_allocate(obj_size, false);
1254 
1255   if (result != NULL) {
1256     obj->copy_disjoint_compact(result, obj_size);
1257   }
1258   return cast_to_oop(result);
1259 }