void ParallelCompactData::verify_clear() {
  for (uint cur_idx = 0; cur_idx < region_count(); ++cur_idx) {
    if (!region(cur_idx)->is_clear()) {
      log_warning(gc)("Uncleared Region: %u", cur_idx);
      region(cur_idx)->verify_clear();
    }
  }
}
#endif // #ifdef ASSERT

STWGCTimer          PSParallelCompact::_gc_timer;
ParallelOldTracer   PSParallelCompact::_gc_tracer;
elapsedTimer        PSParallelCompact::_accumulated_time;
unsigned int        PSParallelCompact::_maximum_compaction_gc_num = 0;
CollectorCounters*  PSParallelCompact::_counters = nullptr;
ParMarkBitMap       PSParallelCompact::_mark_bitmap;
ParallelCompactData PSParallelCompact::_summary_data;

PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;

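// Applied during the adjust-pointers phase: rewrites each oop field so that
// it points to the new (post-compaction) address of its referent.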
class PCAdjustPointerClosure: public BasicOopIterateClosure {
  template <typename T>
  void do_oop_work(T* p) { PSParallelCompact::adjust_pointer(p); }

public:
  virtual void do_oop(oop* p)       { do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { do_oop_work(p); }

  virtual ReferenceIterationMode reference_iteration_mode() { return DO_FIELDS; }
};

static PCAdjustPointerClosure pc_adjust_pointer_closure;

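// During a full GC an object is alive iff it was marked in the marking phase.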
bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }

void PSParallelCompact::post_initialize() {
  ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
  _span_based_discoverer.set_span(heap->reserved_region());
  _ref_processor =
    new ReferenceProcessor(&_span_based_discoverer,
                           ParallelGCThreads,   // mt processing degree
                           ParallelGCThreads,   // mt discovery degree
                           false,               // concurrent_discovery
                           &_is_alive_closure); // non-header is alive closure

  _counters = new CollectorCounters("Parallel full collection pauses", 1);

  // Initialize static fields in ParCompactionManager.
  ParCompactionManager::initialize(mark_bitmap());
}

bool PSParallelCompact::initialize_aux_data() {
  // ...
}

// ...

  // Let the size policy know we're starting
  size_policy->major_collection_begin();

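  // Clear the table of derived (interior) pointers held in compiled frames;
  // entries recorded during this GC are updated after their base oops move.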
#if COMPILER2_OR_JVMCI
  DerivedPointerTable::clear();
#endif

  ref_processor()->start_discovery(clear_all_soft_refs);

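  // Phase 1: trace the object graph from the GC roots and mark every
  // reachable object in the mark bitmap.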
  marking_phase(&_gc_tracer);

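  // Phase 2: summarize the live data in each region to determine where live
  // objects will be compacted to.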
  summary_phase(should_do_max_compaction);

#if COMPILER2_OR_JVMCI
  assert(DerivedPointerTable::is_active(), "Sanity");
  DerivedPointerTable::set_active(false);
#endif

  FullGCForwarding::begin();

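  // Phase 3: compute the new address of every live object and record it as a
  // forwarding pointer.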
  forward_to_new_addr();

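  // Phase 4: update all references in roots and live objects to point to the
  // forwarded addresses.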
  adjust_pointers();

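  // Phase 5: copy live objects to their new locations.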
  compact();

  FullGCForwarding::end();

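  // Restore the mark words that were saved aside when forwarding information
  // overwrote object headers.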
  ParCompactionManager::_preserved_marks_set->restore(&ParallelScavengeHeap::heap()->workers());

  ParCompactionManager::verify_all_region_stack_empty();

  // Reset the mark bitmap, summary data, and do other bookkeeping. Must be
  // done before resizing.
  post_compact();

  size_policy->major_collection_end();

  size_policy->sample_old_gen_used_bytes(MAX2(pre_gc_values.old_gen_used(), old_gen->used_in_bytes()));

  if (UseAdaptiveSizePolicy) {
    heap->resize_after_full_gc();
  }

  heap->resize_all_tlabs();

  // Resize the metaspace capacity after a collection
  MetaspaceGC::compute_new_size();