src/hotspot/share/gc/parallel/psMarkSweep.cpp

 176   {
 177     HandleMark hm;
 178 
 179     GCTraceCPUTime tcpu;
 180     GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause, true);
 181 
 182     heap->pre_full_gc_dump(_gc_timer);
 183 
 184     TraceCollectorStats tcs(counters());
 185     TraceMemoryManagerStats tms(heap->old_gc_manager(), gc_cause);
 186 
 187     if (log_is_enabled(Debug, gc, heap, exit)) {
 188       accumulated_time()->start();
 189     }
 190 
 191     // Let the size policy know we're starting
 192     size_policy->major_collection_begin();
 193 
 194     BiasedLocking::preserve_marks();
 195 
 196     const PreGenGCValues pre_gc_values = heap->get_pre_gc_values();




 197 
 198     allocate_stacks();
 199 
 200 #if COMPILER2_OR_JVMCI
 201     DerivedPointerTable::clear();
 202 #endif
 203 
 204     ref_processor()->enable_discovery();
 205     ref_processor()->setup_policy(clear_all_softrefs);
 206 
 207     mark_sweep_phase1(clear_all_softrefs);
 208 
 209     mark_sweep_phase2();
 210 
 211 #if COMPILER2_OR_JVMCI
 212     // Don't add any more derived pointers during phase3
 213     assert(DerivedPointerTable::is_active(), "Sanity");
 214     DerivedPointerTable::set_active(false);
 215 #endif
 216 
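A note on the DerivedPointerTable bracket above: C2- and JVMCI-compiled frames can hold interior pointers derived from a base oop (base plus offset), and once compaction moves the base such a pointer cannot be fixed up in isolation; it has to be recomputed. The three calls in this file frame the collection roughly like this (a condensed sketch with elisions, not new code):

    #if COMPILER2_OR_JVMCI
      DerivedPointerTable::clear();            // phases 1-2 record (base, derived) pairs
      // ... marking and forwarding-address computation ...
      DerivedPointerTable::set_active(false);  // phase 3 must not record new pairs
      // ... pointer adjustment and compaction ...
      DerivedPointerTable::update_pointers();  // recompute each derived pointer
                                               // from its relocated base
    #endif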


 238     Universe::update_heap_info_at_gc();
 239 
 240     survivors_empty = young_gen->from_space()->is_empty() &&
 241                       young_gen->to_space()->is_empty();
 242     young_gen_empty = eden_empty && survivors_empty;
 243 
 244     PSCardTable* card_table = heap->card_table();
 245     MemRegion old_mr = heap->old_gen()->reserved();
 246     if (young_gen_empty) {
 247       card_table->clear(MemRegion(old_mr.start(), old_mr.end()));
 248     } else {
 249       card_table->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 250     }
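The clear-versus-invalidate split above rests on a single invariant: the old-gen card table exists only to record old-to-young references for the next scavenge. If eden and both survivor spaces are empty after the full GC, no such references can exist and every card may be left clean; otherwise the pre-GC card state is stale after compaction, so every card is conservatively dirtied. A minimal restatement, using a hypothetical helper name:

    // Hypothetical helper restating the decision above.
    static void reset_old_gen_cards(PSCardTable* card_table, MemRegion old_mr,
                                    bool young_gen_empty) {
      if (young_gen_empty) {
        // No old->young references can exist; every card may be clean.
        card_table->clear(old_mr);
      } else {
        // Compaction moved objects; dirty all cards so the next scavenge rescans.
        card_table->invalidate(old_mr);
      }
    }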
 251 
 252     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 253     ClassLoaderDataGraph::purge();
 254     MetaspaceUtils::verify_metrics();
 255 
 256     BiasedLocking::restore_marks();
 257     heap->prune_scavengable_nmethods();

 258 
 259 #if COMPILER2_OR_JVMCI
 260     DerivedPointerTable::update_pointers();
 261 #endif
 262 
 263     assert(!ref_processor()->discovery_enabled(), "Should have been disabled earlier");
 264 
 265     // Update time of last GC
 266     reset_millis_since_last_gc();
 267 
 268     // Let the size policy know we're done
 269     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 270 
 271     if (UseAdaptiveSizePolicy) {
 272 
 273       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
 274       log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
 275                           old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
 276 
 277       // Don't check if the size_policy is ready here.  Let


 331       log_debug(gc, ergo)("AdaptiveSizeStop: collection: %d ", heap->total_collections());
 332     }
 333 
 334     if (UsePerfData) {
 335       heap->gc_policy_counters()->update_counters();
 336       heap->gc_policy_counters()->update_old_capacity(
 337         old_gen->capacity_in_bytes());
 338       heap->gc_policy_counters()->update_young_capacity(
 339         young_gen->capacity_in_bytes());
 340     }
 341 
 342     heap->resize_all_tlabs();
 343 
 344     // We collected the heap, recalculate the metaspace capacity
 345     MetaspaceGC::compute_new_size();
 346 
 347     if (log_is_enabled(Debug, gc, heap, exit)) {
 348       accumulated_time()->stop();
 349     }
 350 
 351     heap->print_heap_change(pre_gc_values);


 352 
 353     // Track memory usage and detect low memory
 354     MemoryService::track_memory_usage();
 355     heap->update_counters();
 356 
 357     heap->post_full_gc_dump(_gc_timer);
 358   }
 359 
 360   if (VerifyAfterGC && heap->total_collections() >= VerifyGCStartAt) {
 361     HandleMark hm;  // Discard invalid handles created during verification
 362     Universe::verify("After GC");
 363   }
 364 
 365   // Re-verify object start arrays
 366   if (VerifyObjectStartArray &&
 367       VerifyAfterGC) {
 368     old_gen->verify_object_start_array();
 369   }
 370 
 371   if (ZapUnusedHeapArea) {


 505   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 506 
 507   // Need to clear claim bits before the tracing starts.
 508   ClassLoaderDataGraph::clear_claimed_marks();
 509 
 510   // General strong roots.
 511   {
 512     ParallelScavengeHeap::ParStrongRootsScope psrs;
 513     Universe::oops_do(mark_and_push_closure());
 514     JNIHandles::oops_do(mark_and_push_closure());   // Global (strong) JNI handles
 515     MarkingCodeBlobClosure each_active_code_blob(mark_and_push_closure(), !CodeBlobToOopClosure::FixRelocations);
 516     Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
 517     ObjectSynchronizer::oops_do(mark_and_push_closure());
 518     Management::oops_do(mark_and_push_closure());
 519     JvmtiExport::oops_do(mark_and_push_closure());
 520     SystemDictionary::oops_do(mark_and_push_closure());
 521     ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
 522     // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
 523     //ScavengableNMethods::scavengable_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
 524     AOT_ONLY(AOTLoader::oops_do(mark_and_push_closure());)

 525   }
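On the commented-out ScavengableNMethods line above: oops embedded in compiled code are deliberately not strong roots here, so an nmethod with no remaining activations stays unmarked and can be unloaded. Active compiled code is still covered by the thread scan a few lines earlier, roughly:

    // Only code blobs with an activation on some thread stack are visited:
    //   MarkingCodeBlobClosure each_active_code_blob(mark_and_push_closure(),
    //                                                !CodeBlobToOopClosure::FixRelocations);
    //   Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
    // An nmethod reachable from no frame keeps no marks and may be flushed.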
 526 
 527   // Flush marking stack.
 528   follow_stack();
 529 
 530   // Process reference objects found during marking
 531   {
 532     GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
 533 
 534     ref_processor()->setup_policy(clear_all_softrefs);
 535     ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->max_num_queues());
 536     const ReferenceProcessorStats& stats =
 537       ref_processor()->process_discovered_references(
 538         is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, &pt);
 539     gc_tracer()->report_gc_reference_stats(stats);
 540     pt.print_all_references();
 541   }
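The three closures handed to process_discovered_references() above divide the work as follows (a sketch of the contract, using this file's accessor names):

    // is_alive_closure()      -- predicate: was the referent marked in phase 1?
    // mark_and_push_closure() -- keep-alive: mark a retained referent and push
    //                            it on the marking stack
    // follow_stack_closure()  -- complete-gc: drain the marking stack so newly
    //                            kept-alive objects are fully traced
    // setup_policy(clear_all_softrefs) selects how aggressively SoftReferences
    // are cleared before the discovered lists are walked.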
 542 
 543   // This is the point where the entire marking should have completed.
 544   assert(_marking_stack.is_empty(), "Marking should have completed");


 599   ClassLoaderDataGraph::clear_claimed_marks();
 600 
 601   // General strong roots.
 602   Universe::oops_do(adjust_pointer_closure());
 603   JNIHandles::oops_do(adjust_pointer_closure());   // Global (strong) JNI handles
 604   Threads::oops_do(adjust_pointer_closure(), NULL);
 605   ObjectSynchronizer::oops_do(adjust_pointer_closure());
 606   Management::oops_do(adjust_pointer_closure());
 607   JvmtiExport::oops_do(adjust_pointer_closure());
 608   SystemDictionary::oops_do(adjust_pointer_closure());
 609   ClassLoaderDataGraph::cld_do(adjust_cld_closure());
 610 
 611   // Now adjust pointers in remaining weak roots.  (All of which should
 612   // have been cleared if they pointed to non-surviving objects.)
 613   // Global (weak) JNI handles
 614   WeakProcessor::oops_do(adjust_pointer_closure());
 615 
 616   CodeBlobToOopClosure adjust_from_blobs(adjust_pointer_closure(), CodeBlobToOopClosure::FixRelocations);
 617   CodeCache::blobs_do(&adjust_from_blobs);
 618   AOT_ONLY(AOTLoader::oops_do(adjust_pointer_closure());)


 619 
 620   ref_processor()->weak_oops_do(adjust_pointer_closure());
 621   PSScavenge::reference_processor()->weak_oops_do(adjust_pointer_closure());
 622 
 623   adjust_marks();
 624 
 625   young_gen->adjust_pointers();
 626   old_gen->adjust_pointers();
 627 }
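Phase 3, which ends above, rewrites every root slot and every live object's fields to the forwarding addresses computed in phase 2; adjust_marks() likewise updates the saved references to objects whose header words were stashed during marking. In toy form (hypothetical types, not the JDK's closure):

    // Toy model of the per-slot rewrite that adjust_pointer_closure() performs:
    struct ToyObj {
      ToyObj* forwardee;  // new address, installed during phase 2
    };

    static void adjust_slot(ToyObj** slot) {
      if (*slot != nullptr && (*slot)->forwardee != nullptr) {
        *slot = (*slot)->forwardee;  // repoint at the post-compaction address
      }
    }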
 628 
 629 void PSMarkSweep::mark_sweep_phase4() {
 630   EventMark m("4 compact heap");
 631   GCTraceTime(Info, gc, phases) tm("Phase 4: Move objects", _gc_timer);
 632 
 633   // All pointers are now adjusted, move objects accordingly
 634 
 635   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 636   PSYoungGen* young_gen = heap->young_gen();
 637   PSOldGen* old_gen = heap->old_gen();
 638 
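With every reference repointed by phase 3, phase 4 performs the actual moves: each generation is walked in address order and each live object is copied to its forwardee, leaving the heap compacted. As a one-line sketch:

    // for each live obj, in address order: copy obj to its forwardee and
    // restore the header word that the forwarding pointer had displaced.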




 176   {
 177     HandleMark hm;
 178 
 179     GCTraceCPUTime tcpu;
 180     GCTraceTime(Info, gc) t("Pause Full", NULL, gc_cause, true);
 181 
 182     heap->pre_full_gc_dump(_gc_timer);
 183 
 184     TraceCollectorStats tcs(counters());
 185     TraceMemoryManagerStats tms(heap->old_gc_manager(), gc_cause);
 186 
 187     if (log_is_enabled(Debug, gc, heap, exit)) {
 188       accumulated_time()->start();
 189     }
 190 
 191     // Let the size policy know we're starting
 192     size_policy->major_collection_begin();
 193 
 194     BiasedLocking::preserve_marks();
 195 
 196     // Capture metadata size before collection for sizing.
 197     size_t metadata_prev_used = MetaspaceUtils::used_bytes();
 198 
 199     size_t old_gen_prev_used = old_gen->used_in_bytes();
 200     size_t young_gen_prev_used = young_gen->used_in_bytes();
 201 
 202     allocate_stacks();
 203 
 204 #if COMPILER2_OR_JVMCI
 205     DerivedPointerTable::clear();
 206 #endif
 207 
 208     ref_processor()->enable_discovery();
 209     ref_processor()->setup_policy(clear_all_softrefs);
 210 
 211     mark_sweep_phase1(clear_all_softrefs);
 212 
 213     mark_sweep_phase2();
 214 
 215 #if COMPILER2_OR_JVMCI
 216     // Don't add any more derived pointers during phase3
 217     assert(DerivedPointerTable::is_active(), "Sanity");
 218     DerivedPointerTable::set_active(false);
 219 #endif
 220 


 242     Universe::update_heap_info_at_gc();
 243 
 244     survivors_empty = young_gen->from_space()->is_empty() &&
 245                       young_gen->to_space()->is_empty();
 246     young_gen_empty = eden_empty && survivors_empty;
 247 
 248     PSCardTable* card_table = heap->card_table();
 249     MemRegion old_mr = heap->old_gen()->reserved();
 250     if (young_gen_empty) {
 251       card_table->clear(MemRegion(old_mr.start(), old_mr.end()));
 252     } else {
 253       card_table->invalidate(MemRegion(old_mr.start(), old_mr.end()));
 254     }
 255 
 256     // Delete metaspaces for unloaded class loaders and clean up loader_data graph
 257     ClassLoaderDataGraph::purge();
 258     MetaspaceUtils::verify_metrics();
 259 
 260     BiasedLocking::restore_marks();
 261     heap->prune_scavengable_nmethods();
 262     JvmtiExport::gc_epilogue();
 263 
 264 #if COMPILER2_OR_JVMCI
 265     DerivedPointerTable::update_pointers();
 266 #endif
 267 
 268     assert(!ref_processor()->discovery_enabled(), "Should have been disabled earlier");
 269 
 270     // Update time of last GC
 271     reset_millis_since_last_gc();
 272 
 273     // Let the size policy know we're done
 274     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
 275 
 276     if (UseAdaptiveSizePolicy) {
 277 
 278       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
 279       log_trace(gc, ergo)("old_gen_capacity: " SIZE_FORMAT " young_gen_capacity: " SIZE_FORMAT,
 280                           old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
 281 
 282       // Don't check if the size_policy is ready here.  Let


 336       log_debug(gc, ergo)("AdaptiveSizeStop: collection: %d ", heap->total_collections());
 337     }
 338 
 339     if (UsePerfData) {
 340       heap->gc_policy_counters()->update_counters();
 341       heap->gc_policy_counters()->update_old_capacity(
 342         old_gen->capacity_in_bytes());
 343       heap->gc_policy_counters()->update_young_capacity(
 344         young_gen->capacity_in_bytes());
 345     }
 346 
 347     heap->resize_all_tlabs();
 348 
 349     // We collected the heap, recalculate the metaspace capacity
 350     MetaspaceGC::compute_new_size();
 351 
 352     if (log_is_enabled(Debug, gc, heap, exit)) {
 353       accumulated_time()->stop();
 354     }
 355 
 356     young_gen->print_used_change(young_gen_prev_used);
 357     old_gen->print_used_change(old_gen_prev_used);
 358     MetaspaceUtils::print_metaspace_change(metadata_prev_used);
 359 
 360     // Track memory usage and detect low memory
 361     MemoryService::track_memory_usage();
 362     heap->update_counters();
 363 
 364     heap->post_full_gc_dump(_gc_timer);
 365   }
 366 
 367   if (VerifyAfterGC && heap->total_collections() >= VerifyGCStartAt) {
 368     HandleMark hm;  // Discard invalid handles created during verification
 369     Universe::verify("After GC");
 370   }
 371 
 372   // Re-verify object start arrays
 373   if (VerifyObjectStartArray &&
 374       VerifyAfterGC) {
 375     old_gen->verify_object_start_array();
 376   }
 377 
 378   if (ZapUnusedHeapArea) {


 512   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 513 
 514   // Need to clear claim bits before the tracing starts.
 515   ClassLoaderDataGraph::clear_claimed_marks();
 516 
 517   // General strong roots.
 518   {
 519     ParallelScavengeHeap::ParStrongRootsScope psrs;
 520     Universe::oops_do(mark_and_push_closure());
 521     JNIHandles::oops_do(mark_and_push_closure());   // Global (strong) JNI handles
 522     MarkingCodeBlobClosure each_active_code_blob(mark_and_push_closure(), !CodeBlobToOopClosure::FixRelocations);
 523     Threads::oops_do(mark_and_push_closure(), &each_active_code_blob);
 524     ObjectSynchronizer::oops_do(mark_and_push_closure());
 525     Management::oops_do(mark_and_push_closure());
 526     JvmtiExport::oops_do(mark_and_push_closure());
 527     SystemDictionary::oops_do(mark_and_push_closure());
 528     ClassLoaderDataGraph::always_strong_cld_do(follow_cld_closure());
 529     // Do not treat nmethods as strong roots for mark/sweep, since we can unload them.
 530     //ScavengableNMethods::scavengable_nmethods_do(CodeBlobToOopClosure(mark_and_push_closure()));
 531     AOT_ONLY(AOTLoader::oops_do(mark_and_push_closure());)
 532     JVMCI_ONLY(JVMCI::oops_do(mark_and_push_closure());)
 533   }
 534 
 535   // Flush marking stack.
 536   follow_stack();
 537 
 538   // Process reference objects found during marking
 539   {
 540     GCTraceTime(Debug, gc, phases) t("Reference Processing", _gc_timer);
 541 
 542     ref_processor()->setup_policy(clear_all_softrefs);
 543     ReferenceProcessorPhaseTimes pt(_gc_timer, ref_processor()->max_num_queues());
 544     const ReferenceProcessorStats& stats =
 545       ref_processor()->process_discovered_references(
 546         is_alive_closure(), mark_and_push_closure(), follow_stack_closure(), NULL, &pt);
 547     gc_tracer()->report_gc_reference_stats(stats);
 548     pt.print_all_references();
 549   }
 550 
 551   // This is the point where the entire marking should have completed.
 552   assert(_marking_stack.is_empty(), "Marking should have completed");


 607   ClassLoaderDataGraph::clear_claimed_marks();
 608 
 609   // General strong roots.
 610   Universe::oops_do(adjust_pointer_closure());
 611   JNIHandles::oops_do(adjust_pointer_closure());   // Global (strong) JNI handles
 612   Threads::oops_do(adjust_pointer_closure(), NULL);
 613   ObjectSynchronizer::oops_do(adjust_pointer_closure());
 614   Management::oops_do(adjust_pointer_closure());
 615   JvmtiExport::oops_do(adjust_pointer_closure());
 616   SystemDictionary::oops_do(adjust_pointer_closure());
 617   ClassLoaderDataGraph::cld_do(adjust_cld_closure());
 618 
 619   // Now adjust pointers in remaining weak roots.  (All of which should
 620   // have been cleared if they pointed to non-surviving objects.)
 621   // Global (weak) JNI handles
 622   WeakProcessor::oops_do(adjust_pointer_closure());
 623 
 624   CodeBlobToOopClosure adjust_from_blobs(adjust_pointer_closure(), CodeBlobToOopClosure::FixRelocations);
 625   CodeCache::blobs_do(&adjust_from_blobs);
 626   AOT_ONLY(AOTLoader::oops_do(adjust_pointer_closure());)
 627 
 628   JVMCI_ONLY(JVMCI::oops_do(adjust_pointer_closure());)
 629 
 630   ref_processor()->weak_oops_do(adjust_pointer_closure());
 631   PSScavenge::reference_processor()->weak_oops_do(adjust_pointer_closure());
 632 
 633   adjust_marks();
 634 
 635   young_gen->adjust_pointers();
 636   old_gen->adjust_pointers();
 637 }
 638 
 639 void PSMarkSweep::mark_sweep_phase4() {
 640   EventMark m("4 compact heap");
 641   GCTraceTime(Info, gc, phases) tm("Phase 4: Move objects", _gc_timer);
 642 
 643   // All pointers are now adjusted, move objects accordingly
 644 
 645   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 646   PSYoungGen* young_gen = heap->young_gen();
 647   PSOldGen* old_gen = heap->old_gen();
 648 

