532 void ParallelCompactData::verify_clear() {
533 for (uint cur_idx = 0; cur_idx < region_count(); ++cur_idx) {
534 if (!region(cur_idx)->is_clear()) {
535 log_warning(gc)("Uncleared Region: %u", cur_idx);
536 region(cur_idx)->verify_clear();
537 }
538 }
539 }
540 #endif // #ifdef ASSERT
541
// Definitions of PSParallelCompact's static state, shared by all full GCs.
542 STWGCTimer PSParallelCompact::_gc_timer;
543 ParallelOldTracer PSParallelCompact::_gc_tracer;
// Total time spent in full collections, accumulated across GCs.
544 elapsedTimer PSParallelCompact::_accumulated_time;
545 unsigned int PSParallelCompact::_maximum_compaction_gc_num = 0;
// Created lazily in post_initialize(); nullptr until then.
546 CollectorCounters* PSParallelCompact::_counters = nullptr;
547 ParMarkBitMap PSParallelCompact::_mark_bitmap;
548 ParallelCompactData PSParallelCompact::_summary_data;
549
// Liveness predicate backed by the mark bitmap (see do_object_b below).
550 PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;
551
552 class PCAdjustPointerClosure: public BasicOopIterateClosure {
553 template <typename T>
554 void do_oop_work(T* p) { PSParallelCompact::adjust_pointer(p); }
555
556 public:
557 virtual void do_oop(oop* p) { do_oop_work(p); }
558 virtual void do_oop(narrowOop* p) { do_oop_work(p); }
559
560 virtual ReferenceIterationMode reference_iteration_mode() { return DO_FIELDS; }
561 };
562
563 static PCAdjustPointerClosure pc_adjust_pointer_closure;
564
565 bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }
566
// One-time setup run after heap initialization: creates the reference
// processor (discovery spanning the whole reserved heap), the collector's
// perf counters, and ParCompactionManager's static state.
567 void PSParallelCompact::post_initialize() {
568 ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
// References anywhere in the reserved region are subject to discovery.
569 _span_based_discoverer.set_span(heap->reserved_region());
570 _ref_processor =
571 new ReferenceProcessor(&_span_based_discoverer,
572 ParallelGCThreads, // mt processing degree
573 ParallelGCThreads, // mt discovery degree
574 false, // concurrent_discovery
575 &_is_alive_closure); // non-header is alive closure
576
577 _counters = new CollectorCounters("Parallel full collection pauses", 1);
578
579 // Initialize static fields in ParCompactionManager.
580 ParCompactionManager::initialize(mark_bitmap());
581 }
582
583 bool PSParallelCompact::initialize_aux_data() {
1040
1041 #if COMPILER2_OR_JVMCI
1042 DerivedPointerTable::clear();
1043 #endif
1044
1045 ref_processor()->start_discovery(clear_all_soft_refs);
1046
1047 ClassUnloadingContext ctx(1 /* num_nmethod_unlink_workers */,
1048 false /* unregister_nmethods_during_purge */,
1049 false /* lock_nmethod_free_separately */);
1050
1051 marking_phase(&_gc_tracer);
1052
1053 summary_phase();
1054
1055 #if COMPILER2_OR_JVMCI
1056 assert(DerivedPointerTable::is_active(), "Sanity");
1057 DerivedPointerTable::set_active(false);
1058 #endif
1059
1060 forward_to_new_addr();
1061
1062 adjust_pointers();
1063
1064 compact();
1065
1066 ParCompactionManager::_preserved_marks_set->restore(&ParallelScavengeHeap::heap()->workers());
1067
1068 ParCompactionManager::verify_all_region_stack_empty();
1069
1070 // Reset the mark bitmap, summary data, and do other bookkeeping. Must be
1071 // done before resizing.
1072 post_compact();
1073
1074 // Let the size policy know we're done
1075 size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
1076
1077 if (UseAdaptiveSizePolicy) {
1078 log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
1079 log_trace(gc, ergo)("old_gen_capacity: %zu young_gen_capacity: %zu",
1080 old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
1081
1082 // Don't check if the size_policy is ready here. Let
1083 // the size_policy check that internally.
1084 if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
1085 AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {
|
// Debug-only consistency check: all regions should be clear at this point.
532 void ParallelCompactData::verify_clear() {
533 for (uint cur_idx = 0; cur_idx < region_count(); ++cur_idx) {
534 if (!region(cur_idx)->is_clear()) {
// Report the offending region, then let it emit detailed diagnostics.
535 log_warning(gc)("Uncleared Region: %u", cur_idx);
536 region(cur_idx)->verify_clear();
537 }
538 }
539 }
540 #endif // #ifdef ASSERT
541
// Static state owned by PSParallelCompact, shared across all full GCs.
542 STWGCTimer PSParallelCompact::_gc_timer;
543 ParallelOldTracer PSParallelCompact::_gc_tracer;
// Cumulative time spent in full collections.
544 elapsedTimer PSParallelCompact::_accumulated_time;
545 unsigned int PSParallelCompact::_maximum_compaction_gc_num = 0;
// Allocated in post_initialize(); nullptr before that.
546 CollectorCounters* PSParallelCompact::_counters = nullptr;
547 ParMarkBitMap PSParallelCompact::_mark_bitmap;
548 ParallelCompactData PSParallelCompact::_summary_data;
549
// Mark-bitmap-backed liveness predicate (see do_object_b below).
550 PSParallelCompact::IsAliveClosure PSParallelCompact::_is_alive_closure;
551
552 class PCAdjustPointerClosureNew: public BasicOopIterateClosure {
553 template <typename T>
554 void do_oop_work(T* p) { PSParallelCompact::adjust_pointer(p); }
555
556 public:
557 virtual void do_oop(oop* p) { do_oop_work(p); }
558 virtual void do_oop(narrowOop* p) { do_oop_work(p); }
559
560 virtual ReferenceIterationMode reference_iteration_mode() { return DO_FIELDS; }
561 };
562
563 static PCAdjustPointerClosureNew pc_adjust_pointer_closure;
564
// Liveness test backed by the mark bitmap: marked <=> alive.
565 bool PSParallelCompact::IsAliveClosure::do_object_b(oop p) { return mark_bitmap()->is_marked(p); }
566
567 void PSParallelCompact::post_initialize() {
568 ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
569 _span_based_discoverer.set_span(heap->reserved_region());
570 _ref_processor =
571 new ReferenceProcessor(&_span_based_discoverer,
572 ParallelGCThreads, // mt processing degree
573 ParallelGCThreads, // mt discovery degree
574 false, // concurrent_discovery
575 &_is_alive_closure); // non-header is alive closure
576
577 _counters = new CollectorCounters("Parallel full collection pauses", 1);
578
579 // Initialize static fields in ParCompactionManager.
580 ParCompactionManager::initialize(mark_bitmap());
581 }
582
583 bool PSParallelCompact::initialize_aux_data() {
1040
1041 #if COMPILER2_OR_JVMCI
1042 DerivedPointerTable::clear();
1043 #endif
1044
1045 ref_processor()->start_discovery(clear_all_soft_refs);
1046
1047 ClassUnloadingContext ctx(1 /* num_nmethod_unlink_workers */,
1048 false /* unregister_nmethods_during_purge */,
1049 false /* lock_nmethod_free_separately */);
1050
1051 marking_phase(&_gc_tracer);
1052
1053 summary_phase();
1054
1055 #if COMPILER2_OR_JVMCI
1056 assert(DerivedPointerTable::is_active(), "Sanity");
1057 DerivedPointerTable::set_active(false);
1058 #endif
1059
1060 FullGCForwarding::begin();
1061
1062 forward_to_new_addr();
1063
1064 adjust_pointers();
1065
1066 compact();
1067
1068 FullGCForwarding::end();
1069
1070 ParCompactionManager::_preserved_marks_set->restore(&ParallelScavengeHeap::heap()->workers());
1071
1072 ParCompactionManager::verify_all_region_stack_empty();
1073
1074 // Reset the mark bitmap, summary data, and do other bookkeeping. Must be
1075 // done before resizing.
1076 post_compact();
1077
1078 // Let the size policy know we're done
1079 size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
1080
1081 if (UseAdaptiveSizePolicy) {
1082 log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
1083 log_trace(gc, ergo)("old_gen_capacity: %zu young_gen_capacity: %zu",
1084 old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
1085
1086 // Don't check if the size_policy is ready here. Let
1087 // the size_policy check that internally.
1088 if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
1089 AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {
|