/*
 * Copyright (c) 2021, Amazon.com, Inc. or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"

#include "gc/shared/workgroup.hpp"
#include "gc/shenandoah/heuristics/shenandoahHeuristics.hpp"
#include "gc/shenandoah/heuristics/shenandoahOldHeuristics.hpp"
#include "gc/shenandoah/shenandoahFreeSet.hpp"
#include "gc/shenandoah/shenandoahGeneration.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahMonitoringSupport.hpp"
#include "gc/shenandoah/shenandoahOldGC.hpp"
#include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
#include "gc/shenandoah/shenandoahWorkerPolicy.hpp"
#include "prims/jvmtiTagMap.hpp"
#include "utilities/events.hpp"

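// Task that distributes the coalesce-and-fill candidate regions across a gang of
// workers. Each worker fills and coalesces its share of the regions; if any worker
// is preempted before finishing, the task records that fact so the remaining work
// can be resumed later.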
class ShenandoahConcurrentCoalesceAndFillTask : public AbstractGangTask {
private:
  uint _nworkers;
  ShenandoahHeapRegion** _coalesce_and_fill_region_array;
  uint _coalesce_and_fill_region_count;
  ShenandoahConcurrentGC* _old_gc;
  volatile bool _is_preempted;

public:
  ShenandoahConcurrentCoalesceAndFillTask(uint nworkers, ShenandoahHeapRegion** coalesce_and_fill_region_array,
                                          uint region_count, ShenandoahConcurrentGC* old_gc) :
    AbstractGangTask("Shenandoah Concurrent Coalesce and Fill"),
    _nworkers(nworkers),
    _coalesce_and_fill_region_array(coalesce_and_fill_region_array),
    _coalesce_and_fill_region_count(region_count),
    _old_gc(old_gc),
    _is_preempted(false) {
  }

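  // Stripe the candidate array across the gang: worker i handles regions i,
  // i + _nworkers, i + 2 * _nworkers, and so on, so workers never contend for
  // the same region.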
  void work(uint worker_id) {
    for (uint region_idx = worker_id; region_idx < _coalesce_and_fill_region_count; region_idx += _nworkers) {
      ShenandoahHeapRegion* r = _coalesce_and_fill_region_array[region_idx];
      if (!r->is_humongous()) {
        if (!r->oop_fill_and_coalesce()) {
          // Coalesce and fill has been preempted
          Atomic::store(&_is_preempted, true);
          return;
        }
      } else {
        // There is only one object in a humongous region and it is not garbage,
        // so there is nothing to coalesce or fill.
      }
    }
  }

  // The value returned from is_completed() is only valid after all worker threads have terminated.
  bool is_completed() {
    return !Atomic::load(&_is_preempted);
  }
};


ShenandoahOldGC::ShenandoahOldGC(ShenandoahGeneration* generation, ShenandoahSharedFlag& allow_preemption) :
    ShenandoahConcurrentGC(generation, false), _allow_preemption(allow_preemption) {
  _coalesce_and_fill_region_array = NEW_C_HEAP_ARRAY(ShenandoahHeapRegion*, ShenandoahHeap::heap()->num_regions(), mtGC);
}

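// Tell the old heuristics to make its evacuation candidates available. The actual
// evacuations are carried out by subsequent young collections.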
void ShenandoahOldGC::start_old_evacuations() {
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  ShenandoahOldHeuristics* old_heuristics = heap->old_heuristics();
  old_heuristics->start_old_evacuations();
}


// Final mark for old-gen differs from final mark for young-gen and global
// collections, so we override the implementation.
void ShenandoahOldGC::op_final_mark() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Should be at safepoint");
  assert(!heap->has_forwarded_objects(), "No forwarded objects on this path");

  if (ShenandoahVerify) {
    heap->verifier()->verify_roots_no_forwarded();
  }

  if (!heap->cancelled_gc()) {
    assert(_mark.generation()->generation_mode() == OLD, "Generation of Old-Gen GC should be OLD");
    _mark.finish_mark();
    assert(!heap->cancelled_gc(), "STW mark cannot OOM");

    // We need to do this because weak root cleaning reports the number of dead handles
    JvmtiTagMap::set_needs_cleaning();

    _generation->prepare_regions_and_collection_set(true);

    heap->set_unload_classes(false);
    heap->prepare_concurrent_roots();

    // Verification following old-gen concurrent mark likely needs to differ from
    // verification following young-gen concurrent mark, so it is disabled for now:
    //   if (ShenandoahVerify) {
    //     heap->verifier()->verify_after_concmark();
    //   }

    if (VerifyAfterGC) {
      Universe::verify();
    }
  }
}

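// Run one increment of the old-generation cycle: concurrent mark, STW final mark,
// concurrent weak reference/root processing, early cleanup, and concurrent
// coalesce-and-fill. Returns false if the cycle was cancelled or preempted before
// it could complete; a preempted coalesce-and-fill is resumed on a later collect.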
bool ShenandoahOldGC::collect(GCCause::Cause cause) {
  ShenandoahHeap* heap = ShenandoahHeap::heap();

  if (!heap->is_concurrent_prep_for_mixed_evacuation_in_progress()) {
    // Skip over the initial phases of old collect if we're resuming mixed evacuation preparation.
    // Continue concurrent mark, do not reset regions, do not mark roots, do not collect $200.
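    // Open the preemption window for the duration of concurrent mark: while
    // _allow_preemption is set, the regulator thread may cancel this old cycle in
    // favor of a young collection. If try_unset() fails below, the regulator has
    // already claimed the flag and a cancellation is imminent.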
    _allow_preemption.set();
    entry_mark();
    if (!_allow_preemption.try_unset()) {
      // The regulator thread has unset the preemption guard. That thread will shortly cancel
      // the gc, but the control thread is now racing it. Wait until this thread sees the cancellation.
      while (!heap->cancelled_gc()) {
        SpinPause();
      }
    }

    if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_mark)) {
      return false;
    }

    // Complete marking under STW
    vmop_entry_final_mark();

    // We aren't dealing with old generation evacuation yet. Our heuristic
    // should not have built a cset in final mark.
    assert(!heap->is_evacuation_in_progress(), "Old gen evacuations are not supported");

    // Process weak roots that might still point to regions that would be broken by cleanup
    if (heap->is_concurrent_weak_root_in_progress()) {
      entry_weak_refs();
      entry_weak_roots();
    }

    // Final mark might have reclaimed some immediate garbage, kick cleanup to reclaim
    // the space. This would be the last action if there is nothing to evacuate.
    entry_cleanup_early();

    {
      ShenandoahHeapLocker locker(heap->lock());
      heap->free_set()->log_status();
    }

    // TODO: Old marking doesn't support class unloading yet
    // Perform concurrent class unloading
    // if (heap->unload_classes() &&
    //     heap->is_concurrent_weak_root_in_progress()) {
    //   entry_class_unloading();
    // }

    heap->set_concurrent_prep_for_mixed_evacuation_in_progress(true);
  }

  // Coalesce and fill objects _after_ weak root processing and class unloading.
  // Weak root and reference processing makes assertions about unmarked referents
  // that will fail if they've been overwritten with filler objects. There is also
  // a case in the LRB that permits access to from-space objects for the purpose
  // of class unloading that is unlikely to function correctly if the object has
  // been filled.

  _allow_preemption.set();

  if (check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_evac)) {
    return false;
  }

  assert(!heap->is_concurrent_strong_root_in_progress(), "No evacuations during old gc.");

  vmop_entry_final_roots();

  if (heap->is_concurrent_prep_for_mixed_evacuation_in_progress()) {
    if (!entry_coalesce_and_fill()) {
      // If old-gen degenerates instead of resuming, we'll just start up an out-of-cycle degenerated GC.
      // This should be a rare event.  Normally, we'll resume the coalesce-and-fill effort after the
      // preempting young-gen GC finishes.
      check_cancellation_and_abort(ShenandoahDegenPoint::_degenerated_outside_cycle);
      return false;
    }
  }

  if (!_allow_preemption.try_unset()) {
    // The regulator thread has unset the preemption guard. That thread will shortly cancel
    // the gc, but the control thread is now racing it. Wait until this thread sees the cancellation.
    while (!heap->cancelled_gc()) {
      SpinPause();
    }
  }

  // Prepare for old evacuations (actual evacuations will happen on subsequent young collects).  This cannot
  // begin until after we have completed coalesce-and-fill.
  start_old_evacuations();

  return true;
}

void ShenandoahOldGC::entry_coalesce_and_fill_message(char* buf, size_t len) const {
  jio_snprintf(buf, len, "Coalescing and filling (%s)", _generation->name());
}

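// Make one concurrent pass over the coalesce-and-fill candidate regions. Returns
// true when every candidate has been processed, clearing the prep-for-mixed-evacuation
// flag; returns false if the pass was preempted, leaving the flag set so that the
// work is resumed by a later old collect.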
bool ShenandoahOldGC::op_coalesce_and_fill() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  ShenandoahOldHeuristics* old_heuristics = heap->old_heuristics();
  WorkGang* workers = heap->workers();
  uint nworkers = workers->active_workers();

  assert(_generation->generation_mode() == OLD, "Only old-GC does coalesce and fill");
  log_debug(gc)("Starting (or resuming) coalesce-and-fill of old heap regions");
  uint coalesce_and_fill_regions_count = old_heuristics->old_coalesce_and_fill_candidates();
  assert(coalesce_and_fill_regions_count <= heap->num_regions(), "Sanity");
  old_heuristics->get_coalesce_and_fill_candidates(_coalesce_and_fill_region_array);
  ShenandoahConcurrentCoalesceAndFillTask task(nworkers, _coalesce_and_fill_region_array, coalesce_and_fill_regions_count, this);

  workers->run_task(&task);
  if (task.is_completed()) {
    // Remember that we're done with coalesce-and-fill.
    heap->set_concurrent_prep_for_mixed_evacuation_in_progress(false);
    return true;
  } else {
    // Otherwise, we got preempted before the work was done.
    log_debug(gc)("Suspending coalesce-and-fill of old heap regions");
    return false;
  }
}

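// Entry point wrapping op_coalesce_and_fill() with the phase timing, collector
// statistics, event mark, and worker scope used by the other concurrent phases.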
bool ShenandoahOldGC::entry_coalesce_and_fill() {
  char msg[1024];
  ShenandoahHeap* const heap = ShenandoahHeap::heap();

  entry_coalesce_and_fill_message(msg, sizeof(msg));
  ShenandoahConcurrentPhase gc_phase(msg, ShenandoahPhaseTimings::coalesce_and_fill);

  TraceCollectorStats tcs(heap->monitoring_support()->concurrent_collection_counters());
  EventMark em("%s", msg);
  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_conc_marking(),
                              "concurrent coalesce and fill");

  return op_coalesce_and_fill();
}