
src/hotspot/share/gc/shared/genCollectedHeap.cpp

Old version:

 773     ScavengableNMethods::nmethods_do(code_roots);
 774   }
 775   if (so & SO_AllCodeCache) {
 776     assert(code_roots != NULL, "must supply closure for code cache");
 777 
 778     // CMSCollector uses this to do intermediate-strength collections.
 779     // We scan the entire code cache, since CodeCache::do_unloading is not called.
 780     CodeCache::blobs_do(code_roots);
 781   }
 782   // Verify that the code cache contents are not subject to
 783   // movement by a scavenging collection.
 784   DEBUG_ONLY(CodeBlobToOopClosure assert_code_is_non_scavengable(&assert_is_non_scavengable_closure, !CodeBlobToOopClosure::FixRelocations));
 785   DEBUG_ONLY(ScavengableNMethods::asserted_non_scavengable_nmethods_do(&assert_code_is_non_scavengable));
 786 }
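
For orientation, the `so & SO_AllCodeCache` test above treats `ScanningOption` as a bitmask: each `SO_*` constant occupies its own bit, so callers can request code-cache scanning strengths independently. A minimal standalone sketch of that pattern (flag names borrowed from the listing; the values and signatures are invented for illustration):

    #include <cassert>
    #include <cstdio>

    // Hypothetical flag values; the real ScanningOption constants live in HotSpot.
    enum ScanningOption {
      SO_None              = 0x0,
      SO_ScavengeCodeCache = 0x1,
      SO_AllCodeCache      = 0x2
    };

    static void scan_code_roots(int so, void (*code_roots)(const char*)) {
      if (so & SO_ScavengeCodeCache) {
        assert(code_roots != nullptr && "must supply closure for code cache");
        code_roots("scavengable nmethods only");
      }
      if (so & SO_AllCodeCache) {
        assert(code_roots != nullptr && "must supply closure for code cache");
        code_roots("entire code cache");
      }
    }

    int main() {
      scan_code_roots(SO_AllCodeCache, [](const char* what) { std::puts(what); });
      return 0;
    }
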
 787 
 788 void GenCollectedHeap::full_process_roots(bool is_adjust_phase,
 789                                           ScanningOption so,
 790                                           bool only_strong_roots,
 791                                           OopClosure* root_closure,
 792                                           CLDClosure* cld_closure) {
 793   MarkingCodeBlobClosure mark_code_closure(root_closure, is_adjust_phase);
 794   CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
 795 
 796   process_roots(so, root_closure, cld_closure, weak_cld_closure, &mark_code_closure);
 797 }
 798 
 799 void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
 800   WeakProcessor::oops_do(root_closure);
 801 }
 802 
 803 bool GenCollectedHeap::no_allocs_since_save_marks() {
 804   return _young_gen->no_allocs_since_save_marks() &&
 805          _old_gen->no_allocs_since_save_marks();
 806 }
 807 
 808 bool GenCollectedHeap::supports_inline_contig_alloc() const {
 809   return _young_gen->supports_inline_contig_alloc();
 810 }
 811 
 812 HeapWord* volatile* GenCollectedHeap::top_addr() const {
 813   return _young_gen->top_addr();

 881                                           GenerationType last_generation) {
 882   do_collection(true,                   // full
 883                 clear_all_soft_refs,    // clear_all_soft_refs
 884                 0,                      // size
 885                 false,                  // is_tlab
 886                 last_generation);       // last_generation
 887   // Hack XXX FIX ME !!!
 888   // A scavenge may not have been attempted, or may have
 889   // been attempted and failed, because the old gen was too full
 890   if (gc_cause() == GCCause::_gc_locker && incremental_collection_failed()) {
 891     log_debug(gc, jni)("GC locker: Trying a full collection because scavenge failed");
 892     // This time allow the old gen to be collected as well
 893     do_collection(true,                // full
 894                   clear_all_soft_refs, // clear_all_soft_refs
 895                   0,                   // size
 896                   false,               // is_tlab
 897                   OldGen);             // last_generation
 898   }
 899 }
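
The retry above is the interesting part of `collect()`: a full collection constrained to a younger `last_generation` can still leave a GC-locker-induced request unsatisfied, so the heap retries once with `OldGen`. A stripped-down model with stand-in types (the `incremental_failed` field replaces the real `gc_cause()`/`incremental_collection_failed()` checks):

    #include <cstdio>

    enum GenerationType { YoungGen, OldGen };

    struct Heap {
      bool incremental_failed = true;   // pretend the scavenge failed under the GC locker

      void do_collection(bool full, GenerationType last) {
        std::printf("full=%d last_generation=%s\n", (int)full,
                    last == OldGen ? "OldGen" : "YoungGen");
      }

      void collect(GenerationType last) {
        do_collection(true, last);
        if (incremental_failed) {       // stands in for the GC-locker/failure check
          do_collection(true, OldGen);  // retry, this time collecting the old gen too
        }
      }
    };

    int main() {
      Heap h;
      h.collect(YoungGen);
      return 0;
    }
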
 900 
 901 bool GenCollectedHeap::is_in_young(oop p) {
 902   bool result = cast_from_oop<HeapWord*>(p) < _old_gen->reserved().start();
 903   assert(result == _young_gen->is_in_reserved(p),
 904          "incorrect test - result=%d, p=" INTPTR_FORMAT, result, p2i((void*)p));
 905   return result;
 906 }
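
`is_in_young()` leans on a layout invariant: the young generation's reserved space sits entirely below the old generation's, so one pointer compare against the old generation's start classifies an oop, and the assert cross-checks that against the explicit young range. A self-contained sketch of the same test (the ranges are stand-ins):

    #include <cassert>

    struct Range { const char* start; const char* end; };

    // One compare classifies the address; the assert mirrors the listing's
    // cross-check against the explicit young-generation range.
    static bool is_in_young(const void* p, const Range& young, const Range& old_gen) {
      const char* cp = static_cast<const char*>(p);
      bool result = cp < old_gen.start;
      assert(result == (cp >= young.start && cp < young.end));
      return result;
    }

    int main() {
      static char heap[64];
      Range young  {heap,      heap + 32};
      Range old_gen{heap + 32, heap + 64};
      return is_in_young(heap + 8, young, old_gen) ? 0 : 1;
    }
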
 907 
 908 // Returns "TRUE" iff "p" points into the committed areas of the heap.
 909 bool GenCollectedHeap::is_in(const void* p) const {
 910   return _young_gen->is_in(p) || _old_gen->is_in(p);
 911 }
 912 
 913 #ifdef ASSERT
 914 // Don't implement this by using is_in_young().  This method is used
 915 // in some cases to check that is_in_young() is correct.
 916 bool GenCollectedHeap::is_in_partial_collection(const void* p) {
 917   assert(is_in_reserved(p) || p == NULL,
 918     "Does not work if address is non-null and outside of the heap");
 919   return p < _young_gen->reserved().end() && p != NULL;
 920 }
 921 #endif

1151 
1152 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1153  private:
1154   bool _full;
1155  public:
1156   void do_generation(Generation* gen) {
1157     gen->gc_prologue(_full);
1158   }
1159   GenGCPrologueClosure(bool full) : _full(full) {};
1160 };
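
`GenGCPrologueClosure` follows the heap's internal-iterator convention: per-phase work is packaged as a `GenClosure` subclass, and `generation_iterate` applies it to each generation in turn. A compact standalone sketch of the pattern (types simplified):

    #include <cstdio>

    struct Generation {
      const char* name;
      void gc_prologue(bool full) { std::printf("%s prologue, full=%d\n", name, (int)full); }
    };

    struct GenClosure {
      virtual void do_generation(Generation* gen) = 0;
      virtual ~GenClosure() {}
    };

    struct PrologueClosure : GenClosure {
      bool _full;
      explicit PrologueClosure(bool full) : _full(full) {}
      void do_generation(Generation* gen) { gen->gc_prologue(_full); }
    };

    static void generation_iterate(GenClosure* cl, Generation* gens, int n) {
      for (int i = 0; i < n; i++) cl->do_generation(&gens[i]);
    }

    int main() {
      Generation gens[2] = {{"young"}, {"old"}};
      PrologueClosure blk(true);
      generation_iterate(&blk, gens, 2);
      return 0;
    }
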
1161 
1162 void GenCollectedHeap::gc_prologue(bool full) {
1163   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1164 
1165   // Fill TLAB's and such
1166   ensure_parsability(true);   // retire TLABs
1167 
1168   // Walk generations
1169   GenGCPrologueClosure blk(full);
1170   generation_iterate(&blk, false);  // not old-to-young.




1171 };
1172 
1173 class GenGCEpilogueClosure: public GenCollectedHeap::GenClosure {
1174  private:
1175   bool _full;
1176  public:
1177   void do_generation(Generation* gen) {
1178     gen->gc_epilogue(_full);
1179   }
1180   GenGCEpilogueClosure(bool full) : _full(full) {};
1181 };
1182 
1183 void GenCollectedHeap::gc_epilogue(bool full) {
1184 #if COMPILER2_OR_JVMCI
1185   assert(DerivedPointerTable::is_empty(), "derived pointer present");
1186   size_t actual_gap = pointer_delta((HeapWord*) (max_uintx-3), *(end_addr()));
1187   guarantee(!CompilerConfig::is_c2_or_jvmci_compiler_enabled() || actual_gap > (size_t)FastAllocateSizeLimit, "inline allocation wraps");
1188 #endif // COMPILER2_OR_JVMCI
1189 




1190   resize_all_tlabs();
1191 
1192   GenGCEpilogueClosure blk(full);
1193   generation_iterate(&blk, false);  // not old-to-young.
1194 
1195   MetaspaceCounters::update_performance_counters();
1196 };
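
The `#if COMPILER2_OR_JVMCI` check above guards bump-pointer ("inline contiguous") allocation: JIT-compiled code adds an object size to the allocation top without an overflow check, so the heap's end must stay more than `FastAllocateSizeLimit` words below the top of the address space. A hedged sketch of that arithmetic (the limit value and layout are invented):

    #include <cassert>
    #include <cstdint>

    // Word-sized distance between two addresses, like HotSpot's pointer_delta.
    static size_t pointer_delta(const void* left, const void* right) {
      return ((uintptr_t)left - (uintptr_t)right) / sizeof(void*);
    }

    int main() {
      static char buffer[1024];
      void* heap_end   = buffer;                    // stand-in for *end_addr()
      void* almost_max = (void*)(UINTPTR_MAX - 3);  // near the top of the address space
      size_t fast_allocate_limit = 128;             // hypothetical limit, in words
      // Compiled code bumps the allocation top by an object size with no
      // overflow check, so the remaining gap must exceed the largest
      // inline-allocatable object.
      size_t gap = pointer_delta(almost_max, heap_end);
      assert(gap > fast_allocate_limit && "inline allocation wraps");
      return 0;
    }
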
1197 
1198 #ifndef PRODUCT
1199 class GenGCSaveTopsBeforeGCClosure: public GenCollectedHeap::GenClosure {
1200  private:
1201  public:
1202   void do_generation(Generation* gen) {
1203     gen->record_spaces_top();
1204   }
1205 };
1206 
1207 void GenCollectedHeap::record_gen_tops_before_GC() {
1208   if (ZapUnusedHeapArea) {
1209     GenGCSaveTopsBeforeGCClosure blk;

1212 }
1213 #endif  // not PRODUCT
1214 
1215 class GenEnsureParsabilityClosure: public GenCollectedHeap::GenClosure {
1216  public:
1217   void do_generation(Generation* gen) {
1218     gen->ensure_parsability();
1219   }
1220 };
1221 
1222 void GenCollectedHeap::ensure_parsability(bool retire_tlabs) {
1223   CollectedHeap::ensure_parsability(retire_tlabs);
1224   GenEnsureParsabilityClosure ep_cl;
1225   generation_iterate(&ep_cl, false);
1226 }
1227 
1228 oop GenCollectedHeap::handle_failed_promotion(Generation* old_gen,
1229                                               oop obj,
1230                                               size_t obj_size) {
1231   guarantee(old_gen == _old_gen, "We only get here with an old generation");
1232   assert(obj_size == obj->size(), "bad obj_size passed in");
1233   HeapWord* result = NULL;
1234 
1235   result = old_gen->expand_and_allocate(obj_size, false);
1236 
1237   if (result != NULL) {
1238     Copy::aligned_disjoint_words(cast_from_oop<HeapWord*>(obj), result, obj_size);
1239   }
1240   return cast_to_oop(result);
1241 }
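
`handle_failed_promotion` is a last-ditch path: when promotion out of the young generation fails, the heap tries to expand the old generation and, on success, copies the object's words to the new location; a NULL result means even expansion could not help. A standalone model with `malloc` standing in for `expand_and_allocate`:

    #include <cstdint>
    #include <cstdlib>
    #include <cstring>

    typedef uintptr_t HeapWord;   // stand-in for HotSpot's HeapWord

    // Stand-in: the real heap would try to grow the old generation first.
    static HeapWord* expand_and_allocate(size_t words) {
      return static_cast<HeapWord*>(std::malloc(words * sizeof(HeapWord)));
    }

    static HeapWord* handle_failed_promotion(const HeapWord* obj, size_t obj_size) {
      HeapWord* result = expand_and_allocate(obj_size);
      if (result != nullptr) {
        // Source and destination never overlap, so a plain word copy suffices.
        std::memcpy(result, obj, obj_size * sizeof(HeapWord));
      }
      return result;   // nullptr: promotion still failed even after expansion
    }

    int main() {
      HeapWord obj[4] = {1, 2, 3, 4};
      HeapWord* copy = handle_failed_promotion(obj, 4);
      int ok = (copy != nullptr && copy[3] == 4);
      std::free(copy);
      return ok ? 0 : 1;
    }
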

New version:

 773     ScavengableNMethods::nmethods_do(code_roots);
 774   }
 775   if (so & SO_AllCodeCache) {
 776     assert(code_roots != NULL, "must supply closure for code cache");
 777 
 778     // CMSCollector uses this to do intermediate-strength collections.
 779     // We scan the entire code cache, since CodeCache::do_unloading is not called.
 780     CodeCache::blobs_do(code_roots);
 781   }
 782   // Verify that the code cache contents are not subject to
 783   // movement by a scavenging collection.
 784   DEBUG_ONLY(CodeBlobToOopClosure assert_code_is_non_scavengable(&assert_is_non_scavengable_closure, !CodeBlobToOopClosure::FixRelocations));
 785   DEBUG_ONLY(ScavengableNMethods::asserted_non_scavengable_nmethods_do(&assert_code_is_non_scavengable));
 786 }
 787 
 788 void GenCollectedHeap::full_process_roots(bool is_adjust_phase,
 789                                           ScanningOption so,
 790                                           bool only_strong_roots,
 791                                           OopClosure* root_closure,
 792                                           CLDClosure* cld_closure) {
 793   MarkingCodeBlobClosure mark_code_closure(root_closure, CodeBlobToOopClosure::FixRelocations, !is_adjust_phase);
 794   CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
 795 
 796   process_roots(so, root_closure, cld_closure, weak_cld_closure, &mark_code_closure);
 797 }
 798 
 799 void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
 800   WeakProcessor::oops_do(root_closure);
 801 }
 802 
 803 bool GenCollectedHeap::no_allocs_since_save_marks() {
 804   return _young_gen->no_allocs_since_save_marks() &&
 805          _old_gen->no_allocs_since_save_marks();
 806 }
 807 
 808 bool GenCollectedHeap::supports_inline_contig_alloc() const {
 809   return _young_gen->supports_inline_contig_alloc();
 810 }
 811 
 812 HeapWord* volatile* GenCollectedHeap::top_addr() const {
 813   return _young_gen->top_addr();

 881                                           GenerationType last_generation) {
 882   do_collection(true,                   // full
 883                 clear_all_soft_refs,    // clear_all_soft_refs
 884                 0,                      // size
 885                 false,                  // is_tlab
 886                 last_generation);       // last_generation
 887   // Hack XXX FIX ME !!!
 888   // A scavenge may not have been attempted, or may have
 889   // been attempted and failed, because the old gen was too full
 890   if (gc_cause() == GCCause::_gc_locker && incremental_collection_failed()) {
 891     log_debug(gc, jni)("GC locker: Trying a full collection because scavenge failed");
 892     // This time allow the old gen to be collected as well
 893     do_collection(true,                // full
 894                   clear_all_soft_refs, // clear_all_soft_refs
 895                   0,                   // size
 896                   false,               // is_tlab
 897                   OldGen);             // last_generation
 898   }
 899 }
 900 
 901 bool GenCollectedHeap::is_in_young(oop p) const {
 902   bool result = cast_from_oop<HeapWord*>(p) < _old_gen->reserved().start();
 903   assert(result == _young_gen->is_in_reserved(p),
 904          "incorrect test - result=%d, p=" INTPTR_FORMAT, result, p2i((void*)p));
 905   return result;
 906 }
 907 
 908 // Returns "TRUE" iff "p" points into the committed areas of the heap.
 909 bool GenCollectedHeap::is_in(const void* p) const {
 910   return _young_gen->is_in(p) || _old_gen->is_in(p);
 911 }
 912 
 913 #ifdef ASSERT
 914 // Don't implement this by using is_in_young().  This method is used
 915 // in some cases to check that is_in_young() is correct.
 916 bool GenCollectedHeap::is_in_partial_collection(const void* p) {
 917   assert(is_in_reserved(p) || p == NULL,
 918     "Does not work if address is non-null and outside of the heap");
 919   return p < _young_gen->reserved().end() && p != NULL;
 920 }
 921 #endif

1151 
1152 class GenGCPrologueClosure: public GenCollectedHeap::GenClosure {
1153  private:
1154   bool _full;
1155  public:
1156   void do_generation(Generation* gen) {
1157     gen->gc_prologue(_full);
1158   }
1159   GenGCPrologueClosure(bool full) : _full(full) {};
1160 };
1161 
1162 void GenCollectedHeap::gc_prologue(bool full) {
1163   assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
1164 
1165   // Fill TLAB's and such
1166   ensure_parsability(true);   // retire TLABs
1167 
1168   // Walk generations
1169   GenGCPrologueClosure blk(full);
1170   generation_iterate(&blk, false);  // not old-to-young.
1171 
1172   if (full) {
1173     CodeCache::increment_marking_cycle();
1174   }
1175 };
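
Note that `CodeCache::increment_marking_cycle()` is called on a full collection from both `gc_prologue` and `gc_epilogue` (see below), so the counter plausibly changes parity as each full cycle starts and finishes. A toy model of that bookkeeping (only the call sites come from the listing; the atomic counter and its use are assumptions):

    #include <atomic>
    #include <cstdint>
    #include <cstdio>

    static std::atomic<uint64_t> marking_cycle{0};

    static void increment_marking_cycle() { marking_cycle.fetch_add(1); }

    static void gc_prologue(bool full) { if (full) increment_marking_cycle(); }
    static void gc_epilogue(bool full) { if (full) increment_marking_cycle(); }

    int main() {
      gc_prologue(true);   // counter becomes odd: a full collection is in progress
      gc_epilogue(true);   // counter becomes even again: the cycle is complete
      std::printf("cycles completed: %llu\n",
                  (unsigned long long)(marking_cycle.load() / 2));
      return 0;
    }
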
1176 
1177 class GenGCEpilogueClosure: public GenCollectedHeap::GenClosure {
1178  private:
1179   bool _full;
1180  public:
1181   void do_generation(Generation* gen) {
1182     gen->gc_epilogue(_full);
1183   }
1184   GenGCEpilogueClosure(bool full) : _full(full) {};
1185 };
1186 
1187 void GenCollectedHeap::gc_epilogue(bool full) {
1188 #if COMPILER2_OR_JVMCI
1189   assert(DerivedPointerTable::is_empty(), "derived pointer present");
1190   size_t actual_gap = pointer_delta((HeapWord*) (max_uintx-3), *(end_addr()));
1191   guarantee(!CompilerConfig::is_c2_or_jvmci_compiler_enabled() || actual_gap > (size_t)FastAllocateSizeLimit, "inline allocation wraps");
1192 #endif // COMPILER2_OR_JVMCI
1193 
1194   if (full) {
1195     CodeCache::increment_marking_cycle();
1196   }
1197 
1198   resize_all_tlabs();
1199 
1200   GenGCEpilogueClosure blk(full);
1201   generation_iterate(&blk, false);  // not old-to-young.
1202 
1203   MetaspaceCounters::update_performance_counters();
1204 };
1205 
1206 #ifndef PRODUCT
1207 class GenGCSaveTopsBeforeGCClosure: public GenCollectedHeap::GenClosure {
1208  private:
1209  public:
1210   void do_generation(Generation* gen) {
1211     gen->record_spaces_top();
1212   }
1213 };
1214 
1215 void GenCollectedHeap::record_gen_tops_before_GC() {
1216   if (ZapUnusedHeapArea) {
1217     GenGCSaveTopsBeforeGCClosure blk;

1220 }
1221 #endif  // not PRODUCT
1222 
1223 class GenEnsureParsabilityClosure: public GenCollectedHeap::GenClosure {
1224  public:
1225   void do_generation(Generation* gen) {
1226     gen->ensure_parsability();
1227   }
1228 };
1229 
1230 void GenCollectedHeap::ensure_parsability(bool retire_tlabs) {
1231   CollectedHeap::ensure_parsability(retire_tlabs);
1232   GenEnsureParsabilityClosure ep_cl;
1233   generation_iterate(&ep_cl, false);
1234 }
1235 
1236 oop GenCollectedHeap::handle_failed_promotion(Generation* old_gen,
1237                                               oop obj,
1238                                               size_t obj_size) {
1239   guarantee(old_gen == _old_gen, "We only get here with an old generation");
1240   assert(obj_size == obj->compact_size(), "bad obj_size passed in");
1241   HeapWord* result = NULL;
1242 
1243   result = old_gen->expand_and_allocate(obj_size, false);
1244 
1245   if (result != NULL) {
1246     obj->copy_disjoint_compact(result, obj_size);
1247   }
1248   return cast_to_oop(result);
1249 }
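
The updated assert checks `obj->compact_size()` rather than `obj->size()`, and the copy goes through `obj->copy_disjoint_compact(...)`, suggesting an object's size at its destination may differ from its current size, for instance if the header is stored more compactly after the move. The sketch below is only a guess at that idea: the two method names come from the listing, everything else is invented.

    #include <cstdint>
    #include <cstring>

    // Illustrative layout only: compact_size() and copy_disjoint_compact()
    // are taken from the listing, but their semantics here are assumptions.
    struct Obj {
      uint64_t header;        // pretend this shrinks to 32 bits when copied
      uint32_t payload[3];

      // Size in 32-bit words after compaction -- hypothetical.
      size_t compact_size() const { return 1 + 3; }

      void copy_disjoint_compact(uint32_t* dst, size_t words) const {
        dst[0] = (uint32_t)header;   // compacted header
        std::memcpy(dst + 1, payload, (words - 1) * sizeof(uint32_t));
      }
    };

    int main() {
      Obj obj{0x1234, {7, 8, 9}};
      uint32_t to[4];
      obj.copy_disjoint_compact(to, obj.compact_size());
      return to[1] == 7 ? 0 : 1;
    }
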