/*
 * Copyright (c) 2021, Red Hat, Inc. All rights reserved.
 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"

#include "gc/shared/collectorCounters.hpp"
#include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
#include "gc/shenandoah/shenandoahConcurrentMark.hpp"
#include "gc/shenandoah/shenandoahDegeneratedGC.hpp"
#include "gc/shenandoah/shenandoahFullGC.hpp"
#include "gc/shenandoah/shenandoahGeneration.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahMetrics.hpp"
#include "gc/shenandoah/shenandoahMonitoringSupport.hpp"
#include "gc/shenandoah/shenandoahOldGeneration.hpp"
#include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
#include "gc/shenandoah/shenandoahRootProcessor.inline.hpp"
#include "gc/shenandoah/shenandoahSTWMark.hpp"
#include "gc/shenandoah/shenandoahUtils.hpp"
#include "gc/shenandoah/shenandoahVerifier.hpp"
#include "gc/shenandoah/shenandoahVMOperations.hpp"
#include "gc/shenandoah/shenandoahWorkerPolicy.hpp"
#include "gc/shenandoah/shenandoahYoungGeneration.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/events.hpp"

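// ShenandoahDegenGC drives the stop-the-world "degenerated" cycle: when a
// concurrent cycle is cancelled (typically on allocation failure), the cycle is
// resumed under a safepoint from the phase where it left off, and is upgraded
// to Full GC if even that cannot make progress.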
ShenandoahDegenGC::ShenandoahDegenGC(ShenandoahDegenPoint degen_point, ShenandoahGeneration* generation) :
  ShenandoahGC(),
  _degen_point(degen_point),
  _generation(generation),
  _upgraded_to_full(false) {
}

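// Run the degenerated cycle at a safepoint, then (in generational mode) record
// MMU and heap status for the completed cycle. This always reports success:
// a degenerated cycle that fails upgrades itself to Full GC instead of
// returning false.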
bool ShenandoahDegenGC::collect(GCCause::Cause cause) {
  vmop_degenerated();
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  if (heap->mode()->is_generational()) {
    bool is_bootstrap_gc = heap->is_concurrent_old_mark_in_progress() && _generation->is_young();
    heap->mmu_tracker()->record_degenerated(_generation, GCId::current(), is_bootstrap_gc,
                                            !heap->collection_set()->has_old_regions());
    const char* msg = is_bootstrap_gc ? "At end of Degenerated Bootstrap Old GC" : "At end of Degenerated GC";
    heap->log_heap_status(msg);
  }
  return true;
}

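// Bookkeeping wrapper: attribute this pause to the STW collector counters and
// the degen_gc_gross timing phase, then hand the operation to the VM thread.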
void ShenandoahDegenGC::vmop_degenerated() {
  TraceCollectorStats tcs(ShenandoahHeap::heap()->monitoring_support()->full_stw_collection_counters());
  ShenandoahTimingsTracker timing(ShenandoahPhaseTimings::degen_gc_gross);
  VM_ShenandoahDegeneratedGC degenerated_gc(this);
  VMThread::execute(&degenerated_gc);
}

void ShenandoahDegenGC::entry_degenerated() {
  const char* msg = degen_event_message(_degen_point);
  ShenandoahPausePhase gc_phase(msg, ShenandoahPhaseTimings::degen_gc, true /* log_heap_usage */);
  EventMark em("%s", msg);
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  ShenandoahWorkerScope scope(heap->workers(),
                              ShenandoahWorkerPolicy::calc_workers_for_stw_degenerated(),
                              "stw degenerated gc");

  heap->set_degenerated_gc_in_progress(true);
  op_degenerated();
  heap->set_degenerated_gc_in_progress(false);
}

void ShenandoahDegenGC::op_degenerated() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  // Degenerated GC is STW, but it can still fail. The current mechanism
  // communicates GC failure via the cancelled_gc() flag, so if we detect a
  // failure after some phase, we have to upgrade the Degenerated GC to Full GC.
  heap->clear_cancelled_gc(true /* clear oom handler */);

#ifdef ASSERT
  if (heap->mode()->is_generational()) {
    if (_generation->is_global()) {
      // We can only get to a degenerated global cycle _after_ a concurrent global cycle
      // has been cancelled. In which case, we expect the concurrent global cycle to have
      // cancelled the old gc already.
      assert(!heap->is_old_gc_active(), "Old GC should not be active during global cycle");
    }

    if (!heap->is_concurrent_old_mark_in_progress()) {
      // If we are not marking the old generation, there should be nothing in the old mark queues
      assert(heap->old_generation()->task_queues()->is_empty(), "Old gen task queues should be empty");
    }
  }
#endif

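  // Snapshot heap metrics now so that, at the end of the cycle, we can judge
  // whether this degenerated collection made enough progress to avoid an
  // upgrade to Full GC.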
  ShenandoahMetricsSnapshot metrics;
  metrics.snap_before();

  switch (_degen_point) {
    // The cases below form a Duff's-device-like structure: the switch describes
    // the full GC cycle, but enters it at different points, depending on which
    // concurrent phase had degenerated.

    case _degenerated_outside_cycle:
      // We have degenerated from outside the cycle, which means something is wrong with
      // the heap, most probably heavy humongous fragmentation, or we are very low on free
      // space. It makes little sense to wait for Full GC to reclaim as much as it can, when
      // we can do the most aggressive degen cycle, which includes processing references and
      // class unloading, unless those features are explicitly disabled.

      if (heap->is_concurrent_old_mark_in_progress()) {
        // We have come straight into a degenerated cycle without running a concurrent cycle
        // first and the SATB barrier is enabled to support concurrent old marking. The SATB buffer
        // may hold a mix of old and young pointers. The old pointers need to be transferred
        // to the old generation mark queues and the young pointers are _not_ part of this
        // snapshot, so they must be dropped here.
        heap->transfer_old_pointers_from_satb();
      }

      // Note that we can only do this for "outside-cycle" degens, otherwise we would risk
      // changing the cycle parameters mid-cycle during concurrent -> degenerated handover.
      heap->set_unload_classes(_generation->heuristics()->can_unload_classes() &&
                               (!heap->mode()->is_generational() || _generation->is_global()));

      if (heap->mode()->is_generational() &&
            (_generation->is_young() || (_generation->is_global() && ShenandoahVerify))) {
        // Swap remembered sets for young, or if the verifier will run during a global collect
        // TODO: This path should not depend on ShenandoahVerify
        _generation->swap_remembered_set();
      }

    case _degenerated_roots:
      // Degenerated from concurrent root mark, reset the flag for STW mark
      if (!heap->mode()->is_generational()) {
        if (heap->is_concurrent_mark_in_progress()) {
          heap->cancel_concurrent_mark();
        }
      } else {
        if (_generation->is_concurrent_mark_in_progress()) {
          // We want to allow old generation marking to be punctuated by young collections
          // (even if they have degenerated). If this is a global cycle, we'd have cancelled
          // the entire old gc before coming into this switch.
          _generation->cancel_marking();
        }
      }

      if (_degen_point == ShenandoahDegenPoint::_degenerated_roots) {
        // We only need this if the concurrent cycle has already swapped the card tables.
        // Marking will use the 'read' table, but interesting pointers may have been
        // recorded in the 'write' table in the time between the cancelled concurrent cycle
        // and this degenerated cycle. These pointers need to be included in the 'read' table
        // used to scan the remembered set during the STW mark which follows here.
        _generation->merge_write_table();
      }

      op_reset();

      // STW mark
      op_mark();

    case _degenerated_mark:
      // No fallthrough. Continue mark, handed over from concurrent mark, if
      // concurrent mark has not yet completed
      if (_degen_point == ShenandoahDegenPoint::_degenerated_mark &&
          heap->is_concurrent_mark_in_progress()) {
        op_finish_mark();
      }
      assert(!heap->cancelled_gc(), "STW mark can not OOM");

      // Degenerated GC selects the collection set, etc.
      op_prepare_evacuation();

      op_cleanup_early();

    case _degenerated_evac:

      if (heap->mode()->is_generational() && _generation->is_global()) {
        op_global_coalesce_and_fill();
      }

      // If the heuristics decided to run a full cycle, this flag is set and we
      // can do evacuation. Otherwise, this is a shortcut cycle.
      if (heap->is_evacuation_in_progress()) {

        if (_degen_point == _degenerated_evac) {
          // Degeneration under the oom-evac protocol allows the mutator LRB to expose
          // references to from-space objects. This is okay, in theory, because we
          // will come to the safepoint here to complete the evacuations and update
          // the references. However, if the from-space reference is written to a
          // region that was EC during final mark or was recycled after final mark,
          // it will not have TAMS or UWM updated. Such a region is effectively
          // skipped during update references, which can lead to crashes and corruption
          // if the from-space reference is accessed.
          if (UseTLAB) {
            heap->labs_make_parsable();
          }

          for (size_t i = 0; i < heap->num_regions(); i++) {
            ShenandoahHeapRegion* r = heap->get_region(i);
            if (r->is_active() && r->top() > r->get_update_watermark()) {
              r->set_update_watermark_at_safepoint(r->top());
            }
          }
        }

        // Degeneration under the oom-evac protocol might have left some objects in
        // the collection set un-evacuated. Restart evacuation from the beginning to
        // capture all objects. For all the objects that are already evacuated,
        // it would be a simple check, which is supposed to be fast. This is also
        // safe to do even without degeneration, as the CSet iterator is at the
        // beginning in preparation for evacuation anyway.
        //
        // Before doing that, we need to make sure there are no cset-pinned regions.
        // These may appear if an allocation failure happened while evacuating the
        // about-to-be-pinned object, the oom-evac protocol left the object in the
        // collection set, and the pin then reached the cset region. If we continued
        // the cycle here, we would trash the cset and the live objects in it. To
        // avoid that, we fail degeneration right away and slide into Full GC to recover.

        {
          heap->sync_pinned_region_status();
          heap->collection_set()->clear_current_index();
          ShenandoahHeapRegion* r;
          while ((r = heap->collection_set()->next()) != nullptr) {
            if (r->is_pinned()) {
              heap->cancel_gc(GCCause::_shenandoah_upgrade_to_full_gc);
              op_degenerated_fail();
              return;
            }
          }

          heap->collection_set()->clear_current_index();
        }
        op_evacuate();
        if (heap->cancelled_gc()) {
          op_degenerated_fail();
          return;
        }
      }

      // If the heuristics decided to run a full cycle, this flag is set and we
      // need to do update-refs. Otherwise, this is a shortcut cycle.
      if (heap->has_forwarded_objects()) {
        op_init_updaterefs();
        assert(!heap->cancelled_gc(), "STW reference update can not OOM");
      }

    case _degenerated_updaterefs:
      if (heap->has_forwarded_objects()) {
        op_updaterefs();
        op_update_roots();
        assert(!heap->cancelled_gc(), "STW reference update can not OOM");
      }

      if (ClassUnloading) {
        // Disarm nmethods that were armed in the concurrent cycle.
        // In the case above, updating roots should have disarmed them already.
        ShenandoahCodeRoots::disarm_nmethods();
      }

      op_cleanup_complete();
      // We defer generation resizing actions until after cset regions have been recycled.
      if (heap->mode()->is_generational()) {
        size_t old_region_surplus = heap->get_old_region_surplus();
        size_t old_region_deficit = heap->get_old_region_deficit();
        bool success;
        size_t region_xfer;
        const char* region_destination;
        if (old_region_surplus > 0) {
          region_xfer = old_region_surplus;
          region_destination = "young";
          success = heap->generation_sizer()->transfer_to_young(old_region_surplus);
        } else if (old_region_deficit > 0) {
          region_xfer = old_region_deficit;
          region_destination = "old";
          success = heap->generation_sizer()->transfer_to_old(old_region_deficit);
          if (!success) {
            ((ShenandoahOldHeuristics *) heap->old_generation()->heuristics())->trigger_cannot_expand();
          }
        } else {
          region_destination = "none";
          region_xfer = 0;
          success = true;
        }

        size_t young_available = heap->young_generation()->available();
        size_t old_available = heap->old_generation()->available();
        log_info(gc, ergo)("After cleanup, %s " SIZE_FORMAT " regions to %s to prepare for next gc, old available: "
                           SIZE_FORMAT "%s, young_available: " SIZE_FORMAT "%s",
                           success ? "successfully transferred" : "failed to transfer", region_xfer, region_destination,
                           byte_size_in_proper_unit(old_available), proper_unit_for_byte_size(old_available),
                           byte_size_in_proper_unit(young_available), proper_unit_for_byte_size(young_available));

        heap->set_old_region_surplus(0);
        heap->set_old_region_deficit(0);
      }
      break;
    default:
      ShouldNotReachHere();
  }

  if (heap->mode()->is_generational()) {
    // In case degeneration interrupted concurrent evacuation or update references, we need to clean up transient state.
    // Otherwise, these actions have no effect.
    heap->set_young_evac_reserve(0);
    heap->set_old_evac_reserve(0);
    heap->reset_old_evac_expended();
    heap->set_promoted_reserve(0);
  }

  if (ShenandoahVerify) {
    heap->verifier()->verify_after_degenerated();
  }

  if (VerifyAfterGC) {
    Universe::verify();
  }

  metrics.snap_after();

  // Check for futility and fail. There is no reason to do several back-to-back Degenerated cycles,
  // because that probably means the heap is overloaded and/or fragmented.
  if (!metrics.is_good_progress()) {
    heap->notify_gc_no_progress();
    heap->cancel_gc(GCCause::_shenandoah_upgrade_to_full_gc);
    op_degenerated_futile();
  } else {
    heap->notify_gc_progress();
  }
}

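// Prepare the generation for a fresh mark: the degenerated analogue of the
// concurrent cycle's "reset" phase, performed here under the safepoint.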
void ShenandoahDegenGC::op_reset() {
  _generation->prepare_gc();
}

void ShenandoahDegenGC::op_mark() {
  assert(!_generation->is_concurrent_mark_in_progress(), "Should be reset");
  ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_stw_mark);
  ShenandoahSTWMark mark(_generation, false /*full gc*/);
  mark.mark();
}

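// Complete a concurrent mark that was interrupted part-way through: finish it
// under the safepoint rather than restarting marking from scratch.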
void ShenandoahDegenGC::op_finish_mark() {
  ShenandoahConcurrentMark mark(_generation);
  mark.finish_mark();
}

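// Everything between the end of marking and the start of evacuation: clean up
// weak roots, select the collection set, retire TLABs, and decide whether an
// evacuation phase is needed at all.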
void ShenandoahDegenGC::op_prepare_evacuation() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  if (ShenandoahVerify) {
    heap->verifier()->verify_roots_no_forwarded();
  }

  // STW cleanup weak roots and unload classes
  heap->parallel_cleaning(false /*full gc*/);

  // Prepare regions and collection set
  _generation->prepare_regions_and_collection_set(false /*concurrent*/);

  // Retire the TLABs, which will force threads to reacquire their TLABs after the pause.
  // This is needed for two reasons. Strong one: new allocations would be with the new freeset,
  // which would be outside the collection set, so no cset writes would happen there.
  // Weaker one: new allocations would happen past the update watermark, and so less work would
  // be needed for reference updates (would update the large filler instead).
  if (UseTLAB) {
    ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_final_manage_labs);
    heap->tlabs_retire(false);
  }

  size_t humongous_regions_promoted = heap->get_promotable_humongous_regions();
  size_t regular_regions_promoted_in_place = heap->get_regular_regions_promoted_in_place();
  if (!heap->collection_set()->is_empty() || (humongous_regions_promoted + regular_regions_promoted_in_place > 0)) {
    // Even if the collection set is empty, we need to do evacuation if there are regions to be promoted in place.
    // Degenerated evacuation takes responsibility for registering objects and setting the remembered set cards to dirty.

    if (ShenandoahVerify) {
      heap->verifier()->verify_before_evacuation();
    }

    heap->set_evacuation_in_progress(true);
    heap->set_has_forwarded_objects(true);

    if (ShenandoahVerify) {
      heap->verifier()->verify_during_evacuation();
    }
  } else {
    if (ShenandoahVerify) {
      heap->verifier()->verify_after_concmark();
    }

    if (VerifyAfterGC) {
      Universe::verify();
    }
  }
}

void ShenandoahDegenGC::op_cleanup_early() {
  ShenandoahHeap::heap()->recycle_trash();
}

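// For a global degenerated cycle, coalesce and fill dead objects in old
// regions so that old-generation memory remains parsable for subsequent
// remembered-set scans.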
void ShenandoahDegenGC::op_global_coalesce_and_fill() {
  ShenandoahHeap::heap()->coalesce_and_fill_old_regions();
}

void ShenandoahDegenGC::op_evacuate() {
  ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_stw_evac);
  ShenandoahHeap::heap()->evacuate_collection_set(false /*concurrent*/);
}

void ShenandoahDegenGC::op_init_updaterefs() {
  // Evacuation has completed
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  heap->set_evacuation_in_progress(false);
  heap->set_concurrent_weak_root_in_progress(false);
  heap->set_concurrent_strong_root_in_progress(false);

  heap->prepare_update_heap_references(false /*concurrent*/);
  heap->set_update_refs_in_progress(true);
}

void ShenandoahDegenGC::op_updaterefs() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_updaterefs);
  // Handed over from concurrent update references phase
  heap->update_heap_references(false /*concurrent*/);

  heap->set_update_refs_in_progress(false);
  heap->set_has_forwarded_objects(false);
}

void ShenandoahDegenGC::op_update_roots() {
  ShenandoahHeap* const heap = ShenandoahHeap::heap();

  update_roots(false /*full_gc*/);

  heap->update_heap_region_states(false /*concurrent*/);

  if (ShenandoahVerify) {
    heap->verifier()->verify_after_updaterefs();
  }

  if (VerifyAfterGC) {
    Universe::verify();
  }

  heap->rebuild_free_set(false /*concurrent*/);
}

void ShenandoahDegenGC::op_cleanup_complete() {
  ShenandoahGCPhase phase(ShenandoahPhaseTimings::degen_gc_cleanup_complete);
  ShenandoahHeap::heap()->recycle_trash();
}

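// Reached when the degenerated cycle itself was cancelled (for example, by an
// evacuation OOM or a pinned cset region): abandon the cycle and run a Full GC
// right here, at the same safepoint.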
void ShenandoahDegenGC::op_degenerated_fail() {
  upgrade_to_full();
  ShenandoahFullGC full_gc;
  full_gc.op_full(GCCause::_shenandoah_upgrade_to_full_gc);
}

void ShenandoahDegenGC::op_degenerated_futile() {
  upgrade_to_full();
  ShenandoahFullGC full_gc;
  full_gc.op_full(GCCause::_shenandoah_upgrade_to_full_gc);
}

const char* ShenandoahDegenGC::degen_event_message(ShenandoahDegenPoint point) const {
  const ShenandoahHeap* heap = ShenandoahHeap::heap();
  switch (point) {
    case _degenerated_unset:
      SHENANDOAH_RETURN_EVENT_MESSAGE(heap, _generation->type(), "Pause Degenerated GC", " (<UNSET>)");
    case _degenerated_outside_cycle:
      SHENANDOAH_RETURN_EVENT_MESSAGE(heap, _generation->type(), "Pause Degenerated GC", " (Outside of Cycle)");
    case _degenerated_roots:
      SHENANDOAH_RETURN_EVENT_MESSAGE(heap, _generation->type(), "Pause Degenerated GC", " (Roots)");
    case _degenerated_mark:
      SHENANDOAH_RETURN_EVENT_MESSAGE(heap, _generation->type(), "Pause Degenerated GC", " (Mark)");
    case _degenerated_evac:
      SHENANDOAH_RETURN_EVENT_MESSAGE(heap, _generation->type(), "Pause Degenerated GC", " (Evacuation)");
    case _degenerated_updaterefs:
      SHENANDOAH_RETURN_EVENT_MESSAGE(heap, _generation->type(), "Pause Degenerated GC", " (Update Refs)");
    default:
      ShouldNotReachHere();
      SHENANDOAH_RETURN_EVENT_MESSAGE(heap, _generation->type(), "Pause Degenerated GC", " (?)");
  }
}

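// Record the upgrade with the collector policy and remember it locally so that
// callers can query upgraded_to_full() after the cycle.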
void ShenandoahDegenGC::upgrade_to_full() {
  log_info(gc)("Degenerate GC upgrading to Full GC");
  ShenandoahHeap::heap()->shenandoah_policy()->record_degenerated_upgrade_to_full();
  _upgraded_to_full = true;
}

bool ShenandoahDegenGC::upgraded_to_full() {
  return _upgraded_to_full;
}