705 // Update region state for both young and old regions
706 ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_update_region_states);
707 ShenandoahInitMarkUpdateRegionStateClosure cl;
708 heap->parallel_heap_region_iterate(&cl);
709 heap->old_generation()->ref_processor()->reset_thread_locals();
710 } else {
711 // Update region state for only young regions
712 ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_update_region_states);
713 ShenandoahInitMarkUpdateRegionStateClosure cl;
714 _generation->parallel_heap_region_iterate(&cl);
715 }
716
717 // Weak reference processing
718 ShenandoahReferenceProcessor* rp = _generation->ref_processor();
719 rp->reset_thread_locals();
720
721 // Make above changes visible to worker threads
722 OrderAccess::fence();
723
724 // Arm nmethods for concurrent mark
725 ShenandoahCodeRoots::arm_nmethods_for_mark();
726
727 ShenandoahStackWatermark::change_epoch_id();
728
729 {
730 ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::init_propagate_gc_state);
731 heap->propagate_gc_state_to_all_threads();
732 }
733 }
734
// Concurrent operation: process the concurrent roots for marking, delegating
// to this cycle's mark object (_mark).
735 void ShenandoahConcurrentGC::op_mark_roots() {
736   _mark.mark_concurrent_roots();
737 }
738
// Concurrent operation: run the main concurrent marking phase via _mark.
739 void ShenandoahConcurrentGC::op_mark() {
740   _mark.concurrent_mark();
741 }
742
// STW operation for the Final Mark pause: completes marking, selects the
// collection set, and — when there is anything to evacuate — flips the heap
// into evacuation mode (forwarded-objects state, armed nmethods/stacks)
// before concurrent execution resumes.
743 void ShenandoahConcurrentGC::op_final_mark() {
744   ShenandoahHeap* const heap = ShenandoahHeap::heap();
745   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Should be at safepoint");
746   assert(!heap->has_forwarded_objects(), "No forwarded objects on this path");
747
748   if (ShenandoahVerify) {
749     heap->verifier()->verify_roots_no_forwarded(_generation);
750   }
751
      // If the cycle was cancelled during concurrent mark, skip finishing mark
      // and cset selection; only the gc-state propagation below still runs.
752   if (!heap->cancelled_gc()) {
753     _mark.finish_mark();
754     assert(!heap->cancelled_gc(), "STW mark cannot OOM");
755
756     // Notify JVMTI that the tagmap table will need cleaning.
757     JvmtiTagMap::set_needs_cleaning();
758
759     // The collection set is chosen by prepare_regions_and_collection_set(). Additionally, certain parameters have been
760     // established to govern the evacuation efforts that are about to begin. Refer to comments on reserve members in
761     // ShenandoahGeneration and ShenandoahOldGeneration for more detail.
762     _generation->prepare_regions_and_collection_set(true /*concurrent*/);
763
764     // Has to be done after cset selection
765     heap->prepare_concurrent_roots();
766
        // A non-empty collection set means evacuation (and reference updating)
        // will follow this pause.
767     if (!heap->collection_set()->is_empty()) {
768       LogTarget(Debug, gc, cset) lt;
769       if (lt.is_enabled()) {
770         ResourceMark rm;
771         LogStream ls(lt);
772         heap->collection_set()->print_on(&ls);
773       }
774
775       if (ShenandoahVerify) {
776         ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
777         heap->verifier()->verify_before_evacuation(_generation);
778       }
779
780       heap->set_evacuation_in_progress(true);
781       // From here on, we need to update references.
782       heap->set_has_forwarded_objects(true);
783
784       // Arm nmethods/stack for concurrent processing
785       ShenandoahCodeRoots::arm_nmethods_for_evac();
786       ShenandoahStackWatermark::change_epoch_id();
787
788     } else {
        // Nothing to evacuate: only verification remains in this pause.
789       if (ShenandoahVerify) {
790         ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
791         if (has_in_place_promotions(heap)) {
792           heap->verifier()->verify_after_concmark_with_promotions(_generation);
793         } else {
794           heap->verifier()->verify_after_concmark(_generation);
795         }
796       }
797     }
798   }
799
      // Publish the updated gc-state to all Java threads, cancelled or not.
800   {
801     ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_mark_propagate_gc_state);
802     heap->propagate_gc_state_to_all_threads();
803   }
804 }
805
1011 }
1012
// NMethodClosure that evacuates/updates the oops embedded in an nmethod's
// GC metadata and then disarms the nmethod's entry barrier. Each nmethod is
// processed while holding its per-nmethod lock.
1013 class ShenandoahEvacUpdateCodeCacheClosure : public NMethodClosure {
1014 private:
1015   BarrierSetNMethod* const _bs;
1016   ShenandoahEvacuateUpdateMetadataClosure _cl;
1017
1018 public:
1019   ShenandoahEvacUpdateCodeCacheClosure() :
1020     _bs(BarrierSet::barrier_set()->barrier_set_nmethod()),
1021     _cl() {
1022   }
1023
1024   void do_nmethod(nmethod* n) {
1025     ShenandoahNMethod* data = ShenandoahNMethod::gc_data(n);
1026     ShenandoahNMethodLocker locker(data->lock());
1027     // Setup EvacOOM scope below reentrant lock to avoid deadlock with
1028     // nmethod_entry_barrier
1029     ShenandoahEvacOOMScope oom;
1030     data->oops_do(&_cl, true/*fix relocation*/);
       // Oops are now updated; threads may enter this nmethod without
       // tripping the entry barrier again.
1031     _bs->disarm(n);
1032   }
1033 };
1034
1035 class ShenandoahConcurrentRootsEvacUpdateTask : public WorkerTask {
1036 private:
1037 ShenandoahPhaseTimings::Phase _phase;
1038 ShenandoahVMRoots<true /*concurrent*/> _vm_roots;
1039 ShenandoahClassLoaderDataRoots<true /*concurrent*/>
1040 _cld_roots;
1041 ShenandoahConcurrentNMethodIterator _nmethod_itr;
1042
1043 public:
1044 ShenandoahConcurrentRootsEvacUpdateTask(ShenandoahPhaseTimings::Phase phase) :
1045 WorkerTask("Shenandoah Evacuate/Update Concurrent Strong Roots"),
1046 _phase(phase),
1047 _vm_roots(phase),
1048 _cld_roots(phase, ShenandoahHeap::heap()->workers()->active_workers(), false /*heap iteration*/),
1049 _nmethod_itr(ShenandoahCodeRoots::table()) {}
1050
1051 void work(uint worker_id) {
1200 heap->set_has_forwarded_objects(false);
1201
1202 if (heap->mode()->is_generational() && heap->is_concurrent_old_mark_in_progress()) {
1203 // Aging_cycle is only relevant during evacuation cycle for individual objects and during final mark for
1204 // entire regions. Both of these relevant operations occur before final update refs.
1205 ShenandoahGenerationalHeap::heap()->set_aging_cycle(false);
1206 }
1207
1208 if (ShenandoahVerify) {
1209 ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_update_refs_verify);
1210 heap->verifier()->verify_after_update_refs(_generation);
1211 }
1212
1213 if (VerifyAfterGC) {
1214 Universe::verify();
1215 }
1216
1217 heap->rebuild_free_set(true /*concurrent*/);
1218 _generation->heuristics()->start_idle_span();
1219
1220 {
1221 ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_update_refs_propagate_gc_state);
1222 heap->propagate_gc_state_to_all_threads();
1223 }
1224 }
1225
1226 bool ShenandoahConcurrentGC::entry_final_roots() {
1227 ShenandoahHeap* const heap = ShenandoahHeap::heap();
1228 TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
1229
1230
1231 const char* msg = conc_final_roots_event_message();
1232 ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_final_roots);
1233 EventMark em("%s", msg);
1234 ShenandoahWorkerScope scope(heap->workers(),
1235 ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
1236 msg);
1237
1238 if (heap->mode()->is_generational()) {
1239 if (!complete_abbreviated_cycle()) {
1257 "cleanup complete.");
1258 ShenandoahHeap::heap()->recycle_trash();
1259 }
1260
// Concurrent reset after the cycle completes: clears mark bitmaps so the next
// marking cycle starts clean. In generational mode the young bitmap is left
// intact while an old-gen bootstrap or old concurrent mark is in progress
// (see comment below).
1261 void ShenandoahConcurrentGC::op_reset_after_collect() {
1262   ShenandoahWorkerScope scope(ShenandoahHeap::heap()->workers(),
1263                               ShenandoahWorkerPolicy::calc_workers_for_conc_reset(),
1264                               "reset after collection.");
1265
1266   ShenandoahHeap* const heap = ShenandoahHeap::heap();
1267   if (heap->mode()->is_generational()) {
1268     // If we are in the midst of an old gc bootstrap or an old marking, we want to leave the mark bit map of
1269     // the young generation intact. In particular, reference processing in the old generation may potentially
1270     // need the reachability of a young generation referent of a Reference object in the old generation.
1271     if (!_do_old_gc_bootstrap && !heap->is_concurrent_old_mark_in_progress()) {
1272       heap->young_generation()->reset_mark_bitmap<false>();
1273     }
1274   } else {
1275     _generation->reset_mark_bitmap<false>();
1276   }
1277 }
1278
// If the GC has been cancelled, record where degeneration should pick up
// (the supplied degen point) and return true so the caller can abort the
// concurrent cycle; otherwise return false.
1279 bool ShenandoahConcurrentGC::check_cancellation_and_abort(ShenandoahDegenPoint point) {
1280   if (ShenandoahHeap::heap()->cancelled_gc()) {
1281     _degen_point = point;
1282     return true;
1283   }
1284   return false;
1285 }
1286
// Composes the "Pause Init Mark" event/log message for this cycle, appending
// an "(unload classes)" note when class unloading is enabled.
1287 const char* ShenandoahConcurrentGC::init_mark_event_message() const {
1288   ShenandoahHeap* const heap = ShenandoahHeap::heap();
1289   assert(!heap->has_forwarded_objects(), "Should not have forwarded objects here");
1290   if (heap->unload_classes()) {
1291     SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Init Mark", " (unload classes)");
1292   } else {
1293     SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Init Mark", "");
1294   }
1295 }
1296
|
705 // Update region state for both young and old regions
706 ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_update_region_states);
707 ShenandoahInitMarkUpdateRegionStateClosure cl;
708 heap->parallel_heap_region_iterate(&cl);
709 heap->old_generation()->ref_processor()->reset_thread_locals();
710 } else {
711 // Update region state for only young regions
712 ShenandoahGCPhase phase(ShenandoahPhaseTimings::init_update_region_states);
713 ShenandoahInitMarkUpdateRegionStateClosure cl;
714 _generation->parallel_heap_region_iterate(&cl);
715 }
716
717 // Weak reference processing
718 ShenandoahReferenceProcessor* rp = _generation->ref_processor();
719 rp->reset_thread_locals();
720
721 // Make above changes visible to worker threads
722 OrderAccess::fence();
723
724 // Arm nmethods for concurrent mark
725 ShenandoahCodeRoots::arm_nmethods();
726 ShenandoahStackWatermark::change_epoch_id();
727
728 {
729 ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::init_propagate_gc_state);
730 heap->propagate_gc_state_to_all_threads();
731 }
732 }
733
// Concurrent operation: process the concurrent roots for marking, delegating
// to this cycle's mark object (_mark).
734 void ShenandoahConcurrentGC::op_mark_roots() {
735   _mark.mark_concurrent_roots();
736 }
737
// Concurrent operation: run the main concurrent marking phase via _mark.
738 void ShenandoahConcurrentGC::op_mark() {
739   _mark.concurrent_mark();
740 }
741
// STW operation for the Final Mark pause: completes marking, selects the
// collection set, and — when there is work to evacuate, or when
// ShenandoahGCStateCheckHotpatch forces a full cycle (see comment below) —
// flips the heap into evacuation mode before concurrent execution resumes.
742 void ShenandoahConcurrentGC::op_final_mark() {
743   ShenandoahHeap* const heap = ShenandoahHeap::heap();
744   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Should be at safepoint");
745   assert(!heap->has_forwarded_objects(), "No forwarded objects on this path");
746
747   if (ShenandoahVerify) {
748     heap->verifier()->verify_roots_no_forwarded(_generation);
749   }
750
      // If the cycle was cancelled during concurrent mark, skip finishing mark
      // and cset selection; only the gc-state propagation below still runs.
751   if (!heap->cancelled_gc()) {
752     _mark.finish_mark();
753     assert(!heap->cancelled_gc(), "STW mark cannot OOM");
754
755     // Notify JVMTI that the tagmap table will need cleaning.
756     JvmtiTagMap::set_needs_cleaning();
757
758     // The collection set is chosen by prepare_regions_and_collection_set(). Additionally, certain parameters have been
759     // established to govern the evacuation efforts that are about to begin. Refer to comments on reserve members in
760     // ShenandoahGeneration and ShenandoahOldGeneration for more detail.
761     _generation->prepare_regions_and_collection_set(true /*concurrent*/);
762
763     // Has to be done after cset selection
764     heap->prepare_concurrent_roots();
765
766     // ShenandoahGCStateCheckHotpatch: we need full cycle to patch barriers back to idle.
767     // final-roots is pauseless, so there is no way to arm barriers.
768     if (ShenandoahGCStateCheckHotpatch || !heap->collection_set()->is_empty()) {
769       LogTarget(Debug, gc, cset) lt;
770       if (lt.is_enabled()) {
771         ResourceMark rm;
772         LogStream ls(lt);
773         heap->collection_set()->print_on(&ls);
774       }
775
776       if (ShenandoahVerify) {
777         ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
778         heap->verifier()->verify_before_evacuation(_generation);
779       }
780
781       heap->set_evacuation_in_progress(true);
782       // From here on, we need to update references.
783       heap->set_has_forwarded_objects(true);
784
785       // Arm nmethods/stack for concurrent processing
786       ShenandoahCodeRoots::arm_nmethods();
787       ShenandoahStackWatermark::change_epoch_id();
788
789     } else {
        // Nothing to evacuate: only verification remains in this pause.
790       if (ShenandoahVerify) {
791         ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_mark_verify);
792         if (has_in_place_promotions(heap)) {
793           heap->verifier()->verify_after_concmark_with_promotions(_generation);
794         } else {
795           heap->verifier()->verify_after_concmark(_generation);
796         }
797       }
798     }
799   }
800
      // Publish the updated gc-state to all Java threads, cancelled or not.
801   {
802     ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_mark_propagate_gc_state);
803     heap->propagate_gc_state_to_all_threads();
804   }
805 }
806
1012 }
1013
// NMethodClosure that evacuates/updates the oops embedded in an nmethod's
// GC metadata and then disarms the nmethod's entry barrier. Each nmethod is
// processed while holding its per-nmethod lock.
1014 class ShenandoahEvacUpdateCodeCacheClosure : public NMethodClosure {
1015 private:
1016   BarrierSetNMethod* const _bs;
1017   ShenandoahEvacuateUpdateMetadataClosure _cl;
1018
1019 public:
1020   ShenandoahEvacUpdateCodeCacheClosure() :
1021     _bs(BarrierSet::barrier_set()->barrier_set_nmethod()),
1022     _cl() {
1023   }
1024
1025   void do_nmethod(nmethod* n) {
1026     ShenandoahNMethod* data = ShenandoahNMethod::gc_data(n);
1027     ShenandoahNMethodLocker locker(data->lock());
1028     // Setup EvacOOM scope below reentrant lock to avoid deadlock with
1029     // nmethod_entry_barrier
1030     ShenandoahEvacOOMScope oom;
1031     data->oops_do(&_cl, true/*fix relocation*/);
       // Disarm via the "unlocked" variant — presumably because the
       // per-nmethod lock is already held above; confirm against
       // ShenandoahNMethod's definition.
1032     ShenandoahNMethod::disarm_nmethod_unlocked(n);
1033   }
1034 };
1035
1036 class ShenandoahConcurrentRootsEvacUpdateTask : public WorkerTask {
1037 private:
1038 ShenandoahPhaseTimings::Phase _phase;
1039 ShenandoahVMRoots<true /*concurrent*/> _vm_roots;
1040 ShenandoahClassLoaderDataRoots<true /*concurrent*/>
1041 _cld_roots;
1042 ShenandoahConcurrentNMethodIterator _nmethod_itr;
1043
1044 public:
1045 ShenandoahConcurrentRootsEvacUpdateTask(ShenandoahPhaseTimings::Phase phase) :
1046 WorkerTask("Shenandoah Evacuate/Update Concurrent Strong Roots"),
1047 _phase(phase),
1048 _vm_roots(phase),
1049 _cld_roots(phase, ShenandoahHeap::heap()->workers()->active_workers(), false /*heap iteration*/),
1050 _nmethod_itr(ShenandoahCodeRoots::table()) {}
1051
1052 void work(uint worker_id) {
1201 heap->set_has_forwarded_objects(false);
1202
1203 if (heap->mode()->is_generational() && heap->is_concurrent_old_mark_in_progress()) {
1204 // Aging_cycle is only relevant during evacuation cycle for individual objects and during final mark for
1205 // entire regions. Both of these relevant operations occur before final update refs.
1206 ShenandoahGenerationalHeap::heap()->set_aging_cycle(false);
1207 }
1208
1209 if (ShenandoahVerify) {
1210 ShenandoahTimingsTracker v(ShenandoahPhaseTimings::final_update_refs_verify);
1211 heap->verifier()->verify_after_update_refs(_generation);
1212 }
1213
1214 if (VerifyAfterGC) {
1215 Universe::verify();
1216 }
1217
1218 heap->rebuild_free_set(true /*concurrent*/);
1219 _generation->heuristics()->start_idle_span();
1220
1221 if (ShenandoahGCStateCheckHotpatch) {
1222 // Final pause: update GC barriers to idle state.
1223 ShenandoahCodeRoots::arm_nmethods();
1224 ShenandoahStackWatermark::change_epoch_id();
1225 }
1226
1227 {
1228 ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::final_update_refs_propagate_gc_state);
1229 heap->propagate_gc_state_to_all_threads();
1230 }
1231 }
1232
1233 bool ShenandoahConcurrentGC::entry_final_roots() {
1234 ShenandoahHeap* const heap = ShenandoahHeap::heap();
1235 TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
1236
1237
1238 const char* msg = conc_final_roots_event_message();
1239 ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::conc_final_roots);
1240 EventMark em("%s", msg);
1241 ShenandoahWorkerScope scope(heap->workers(),
1242 ShenandoahWorkerPolicy::calc_workers_for_conc_evac(),
1243 msg);
1244
1245 if (heap->mode()->is_generational()) {
1246 if (!complete_abbreviated_cycle()) {
1264 "cleanup complete.");
1265 ShenandoahHeap::heap()->recycle_trash();
1266 }
1267
// Concurrent reset after the cycle completes: clears mark bitmaps so the next
// marking cycle starts clean. In generational mode the young bitmap is left
// intact while an old-gen bootstrap or old concurrent mark is in progress
// (see comment below). With ShenandoahGCStateCheckHotpatch, also disarms all
// nmethod barriers at the end of the cycle.
1268 void ShenandoahConcurrentGC::op_reset_after_collect() {
1269   ShenandoahWorkerScope scope(ShenandoahHeap::heap()->workers(),
1270                               ShenandoahWorkerPolicy::calc_workers_for_conc_reset(),
1271                               "reset after collection.");
1272
1273   ShenandoahHeap* const heap = ShenandoahHeap::heap();
1274   if (heap->mode()->is_generational()) {
1275     // If we are in the midst of an old gc bootstrap or an old marking, we want to leave the mark bit map of
1276     // the young generation intact. In particular, reference processing in the old generation may potentially
1277     // need the reachability of a young generation referent of a Reference object in the old generation.
1278     if (!_do_old_gc_bootstrap && !heap->is_concurrent_old_mark_in_progress()) {
1279       heap->young_generation()->reset_mark_bitmap<false>();
1280     }
1281   } else {
1282     _generation->reset_mark_bitmap<false>();
1283   }
1284
1285   // Also go and disable all barriers in all current nmethods.
1286   if (ShenandoahGCStateCheckHotpatch) {
1287     ShenandoahCodeRoots::disarm_nmethods();
1288   }
1289 }
1290
// If the GC has been cancelled, record where degeneration should pick up
// (the supplied degen point) and return true so the caller can abort the
// concurrent cycle; otherwise return false.
1291 bool ShenandoahConcurrentGC::check_cancellation_and_abort(ShenandoahDegenPoint point) {
1292   if (ShenandoahHeap::heap()->cancelled_gc()) {
1293     _degen_point = point;
1294     return true;
1295   }
1296   return false;
1297 }
1298
// Composes the "Pause Init Mark" event/log message for this cycle, appending
// an "(unload classes)" note when class unloading is enabled.
1299 const char* ShenandoahConcurrentGC::init_mark_event_message() const {
1300   ShenandoahHeap* const heap = ShenandoahHeap::heap();
1301   assert(!heap->has_forwarded_objects(), "Should not have forwarded objects here");
1302   if (heap->unload_classes()) {
1303     SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Init Mark", " (unload classes)");
1304   } else {
1305     SHENANDOAH_RETURN_EVENT_MESSAGE(_generation->type(), "Pause Init Mark", "");
1306   }
1307 }
1308
|