
src/hotspot/share/gc/parallel/psParallelCompact.cpp


Old:

1043 
1044 #if COMPILER2_OR_JVMCI
1045     DerivedPointerTable::clear();
1046 #endif
1047 
1048     ref_processor()->start_discovery(clear_all_soft_refs);
1049 
1050     ClassUnloadingContext ctx(1 /* num_nmethod_unlink_workers */,
1051                               false /* unregister_nmethods_during_purge */,
1052                               false /* lock_nmethod_free_separately */);
1053 
1054     marking_phase(&_gc_tracer);
1055 
1056     summary_phase();
1057 
1058 #if COMPILER2_OR_JVMCI
1059     assert(DerivedPointerTable::is_active(), "Sanity");
1060     DerivedPointerTable::set_active(false);
1061 #endif
1062 


1063     forward_to_new_addr();
1064 
1065     adjust_pointers();
1066 
1067     compact();
1068 


1069     ParCompactionManager::_preserved_marks_set->restore(&ParallelScavengeHeap::heap()->workers());
1070 
1071     ParCompactionManager::verify_all_region_stack_empty();
1072 
1073     // Reset the mark bitmap, summary data, and do other bookkeeping.  Must be
1074     // done before resizing.
1075     post_compact();
1076 
1077     // Let the size policy know we're done
1078     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
1079 
1080     if (UseAdaptiveSizePolicy) {
1081       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
1082       log_trace(gc, ergo)("old_gen_capacity: %zu young_gen_capacity: %zu",
1083                           old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
1084 
1085       // Don't check if the size_policy is ready here.  Let
1086       // the size_policy check that internally.
1087       if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
1088           AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {

New:

1043 
1044 #if COMPILER2_OR_JVMCI
1045     DerivedPointerTable::clear();
1046 #endif
1047 
1048     ref_processor()->start_discovery(clear_all_soft_refs);
1049 
1050     ClassUnloadingContext ctx(1 /* num_nmethod_unlink_workers */,
1051                               false /* unregister_nmethods_during_purge */,
1052                               false /* lock_nmethod_free_separately */);
1053 
1054     marking_phase(&_gc_tracer);
1055 
1056     summary_phase();
1057 
1058 #if COMPILER2_OR_JVMCI
1059     assert(DerivedPointerTable::is_active(), "Sanity");
1060     DerivedPointerTable::set_active(false);
1061 #endif
1062 
1063     FullGCForwarding::begin();
1064 
1065     forward_to_new_addr();
1066 
1067     adjust_pointers();
1068 
1069     compact();
1070 
1071     FullGCForwarding::end();
1072 
1073     ParCompactionManager::_preserved_marks_set->restore(&ParallelScavengeHeap::heap()->workers());
1074 
1075     ParCompactionManager::verify_all_region_stack_empty();
1076 
1077     // Reset the mark bitmap, summary data, and do other bookkeeping.  Must be
1078     // done before resizing.
1079     post_compact();
1080 
1081     // Let the size policy know we're done
1082     size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);
1083 
1084     if (UseAdaptiveSizePolicy) {
1085       log_debug(gc, ergo)("AdaptiveSizeStart: collection: %d ", heap->total_collections());
1086       log_trace(gc, ergo)("old_gen_capacity: %zu young_gen_capacity: %zu",
1087                           old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());
1088 
1089       // Don't check if the size_policy is ready here.  Let
1090       // the size_policy check that internally.
1091       if (UseAdaptiveGenerationSizePolicyAtMajorCollection &&
1092           AdaptiveSizePolicy::should_update_promo_stats(gc_cause)) {