1 /*
  2  * Copyright (c) 2018, 2021, Red Hat, Inc. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "gc/shared/gcCause.hpp"
 27 #include "gc/shenandoah/shenandoahAllocRequest.hpp"
 28 #include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
 29 #include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
 30 #include "gc/shenandoah/shenandoahGeneration.hpp"
 31 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
 32 #include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
 33 #include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
 34 #include "gc/shenandoah/shenandoahOldGeneration.hpp"
 35 #include "gc/shenandoah/shenandoahYoungGeneration.hpp"
 36 #include "gc/shenandoah/heuristics/shenandoahHeuristics.hpp"
 37 #include "gc/shenandoah/mode/shenandoahMode.hpp"
 38 #include "logging/log.hpp"
 39 #include "logging/logTag.hpp"
 40 #include "runtime/globals_extension.hpp"
 41 
 42 int ShenandoahHeuristics::compare_by_garbage(RegionData a, RegionData b) {
 43   if (a._garbage > b._garbage)
 44     return -1;
 45   else if (a._garbage < b._garbage)
 46     return 1;
 47   else return 0;
 48 }
 49 
// Construct heuristics bound to one generation (young, old, or global).
// Allocates a heap-regions-sized scratch array of RegionData that is reused
// by every collection-set selection; freed in the destructor.
ShenandoahHeuristics::ShenandoahHeuristics(ShenandoahGeneration* generation) :
  _generation(generation),
  _region_data(NULL),
  _degenerated_cycles_in_a_row(0),
  _successful_cycles_in_a_row(0),
  _guaranteed_gc_interval(0),
  _cycle_start(os::elapsedTime()),
  _last_cycle_end(0),
  _gc_times_learned(0),
  _gc_time_penalties(0),
  // Truncated sequence of recent GC times, decayed so newer samples dominate.
  _gc_time_history(new TruncatedSeq(10, ShenandoahAdaptiveDecayFactor)),
  _live_memory_last_cycle(0),
  _live_memory_penultimate_cycle(0),
  _metaspace_oom()
{
  // No unloading during concurrent mark? Communicate that to heuristics
  if (!ClassUnloadingWithConcurrentMark) {
    FLAG_SET_DEFAULT(ShenandoahUnloadClassesFrequency, 0);
  }

  size_t num_regions = ShenandoahHeap::heap()->num_regions();
  assert(num_regions > 0, "Sanity");

  // One candidate slot per heap region; sized once, the region count is fixed.
  _region_data = NEW_C_HEAP_ARRAY(RegionData, num_regions, mtGC);
}
 75 
 76 ShenandoahHeuristics::~ShenandoahHeuristics() {
 77   FREE_C_HEAP_ARRAY(RegionGarbage, _region_data);
 78 }
 79 
 80 size_t ShenandoahHeuristics::select_aged_regions(size_t old_available, size_t num_regions, bool preselected_regions[]) {
 81   ShenandoahHeap* heap = ShenandoahHeap::heap();
 82   size_t old_consumed = 0;
 83   if (heap->mode()->is_generational()) {
 84     for (size_t i = 0; i < num_regions; i++) {
 85       ShenandoahHeapRegion* region = heap->get_region(i);
 86       if (in_generation(region) && !region->is_empty() && region->is_regular() && (region->age() >= InitialTenuringThreshold)) {
 87         size_t promotion_need = (size_t) (region->get_live_data_bytes() * ShenandoahEvacWaste);
 88         if (old_consumed + promotion_need < old_available) {
 89           old_consumed += promotion_need;
 90           preselected_regions[i] = true;
 91         }
 92         // Note that we keep going even if one region is excluded from selection.  Subsequent regions may be selected
 93         // if they have smaller live data.
 94       }
 95     }
 96   }
 97   return old_consumed;
 98 }
 99 
// Select the collection set for a young or global cycle. Walks all regions of
// this generation, reclaims immediate garbage (fully-dead regular and humongous
// regions) on the spot, gathers the remaining live regular regions as
// candidates, and then delegates the final pick to the subclass via
// choose_collection_set_from_regiondata(). old_heuristics, when non-NULL,
// primes the cset with old-gen candidates for a mixed collection.
void ShenandoahHeuristics::choose_collection_set(ShenandoahCollectionSet* collection_set, ShenandoahOldHeuristics* old_heuristics) {
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  bool is_generational = heap->mode()->is_generational();

  assert(collection_set->count() == 0, "Must be empty");
  assert(_generation->generation_mode() != OLD, "Old GC invokes ShenandoahOldHeuristics::choose_collection_set()");

  // Check all pinned regions have updated status before choosing the collection set.
  heap->assert_pinned_region_status();

  // Step 1. Build up the region candidates we care about, rejecting losers and accepting winners right away.

  size_t num_regions = heap->num_regions();

  // Scratch array allocated by the constructor; refilled on every selection.
  RegionData* candidates = _region_data;

  size_t cand_idx = 0;

  size_t total_garbage = 0;

  size_t immediate_garbage = 0;
  size_t immediate_regions = 0;

  size_t free = 0;
  size_t free_regions = 0;
  size_t live_memory = 0;

  for (size_t i = 0; i < num_regions; i++) {
    ShenandoahHeapRegion* region = heap->get_region(i);
    // Skip regions that belong to some other generation.
    if (!in_generation(region)) {
      continue;
    }

    size_t garbage = region->garbage();
    total_garbage += garbage;
    if (region->is_empty()) {
      free_regions++;
      free += ShenandoahHeapRegion::region_size_bytes();
    } else if (region->is_regular()) {
      if (!region->has_live()) {
        // We can recycle it right away and put it in the free set.
        immediate_regions++;
        immediate_garbage += garbage;
        region->make_trash_immediate();
      } else {
        assert (_generation->generation_mode() != OLD, "OLD is handled elsewhere");
        live_memory += region->get_live_data_bytes();
        // This is our candidate for later consideration.
        candidates[cand_idx]._region = region;
        if (is_generational && collection_set->is_preselected(i)) {
          // If region is preselected, we know mode()->is_generational() and region->age() >= InitialTenuringThreshold)
          // Inflate the reported garbage to the full region size so preselected
          // (promotion) regions sort ahead of ordinary candidates.
          garbage = ShenandoahHeapRegion::region_size_bytes();
        }
        candidates[cand_idx]._garbage = garbage;
        cand_idx++;
      }
    } else if (region->is_humongous_start()) {

      // Reclaim humongous regions here, and count them as the immediate garbage
#ifdef ASSERT
      bool reg_live = region->has_live();
      bool bm_live = heap->complete_marking_context()->is_marked(cast_to_oop(region->bottom()));
      assert(reg_live == bm_live,
             "Humongous liveness and marks should agree. Region live: %s; Bitmap live: %s; Region Live Words: " SIZE_FORMAT,
             BOOL_TO_STR(reg_live), BOOL_TO_STR(bm_live), region->get_live_data_words());
#endif
      if (!region->has_live()) {
        heap->trash_humongous_region_at(region);

        // Count only the start. Continuations would be counted on "trash" path
        immediate_regions++;
        immediate_garbage += garbage;
      } else {
        live_memory += region->get_live_data_bytes();
      }
    } else if (region->is_trash()) {
      // Count in just trashed collection set, during coalesced CM-with-UR
      immediate_regions++;
      immediate_garbage += garbage;
    } else {                      // region->is_humongous_cont() and !region->is_trash()
      live_memory += region->get_live_data_bytes();
    }
  }

  // Remember the live-memory observation for triggering heuristics.
  save_last_live_memory(live_memory);

  // Step 2. Look back at garbage statistics, and decide if we want to collect anything,
  // given the amount of immediately reclaimable garbage. If we do, figure out the collection set.

  assert (immediate_garbage <= total_garbage,
          "Cannot have more immediate garbage than total garbage: " SIZE_FORMAT "%s vs " SIZE_FORMAT "%s",
          byte_size_in_proper_unit(immediate_garbage), proper_unit_for_byte_size(immediate_garbage),
          byte_size_in_proper_unit(total_garbage),     proper_unit_for_byte_size(total_garbage));

  size_t immediate_percent = (total_garbage == 0) ? 0 : (immediate_garbage * 100 / total_garbage);
  collection_set->set_immediate_trash(immediate_garbage);

  // Only build a real collection set when the immediate reclaim alone was not
  // good enough (below ShenandoahImmediateThreshold percent of total garbage).
  if (immediate_percent <= ShenandoahImmediateThreshold) {
    if (old_heuristics != NULL) {
      old_heuristics->prime_collection_set(collection_set);
    }
    // else, this is global collection and doesn't need to prime_collection_set

    // Add young-gen regions into the collection set.  This is a virtual call, implemented differently by each
    // of the heuristics subclasses.
    choose_collection_set_from_regiondata(collection_set, candidates, cand_idx, immediate_garbage + free);
  } else {
    // we're going to skip evacuation and update refs because we reclaimed sufficient amounts of immediate garbage.
    heap->shenandoah_policy()->record_abbreviated_cycle();
  }

  if (collection_set->has_old_regions()) {
    heap->shenandoah_policy()->record_mixed_cycle();
  }

  // Reporting: percentages are computed against total garbage to make the
  // log lines comparable across cycles.
  size_t cset_percent = (total_garbage == 0) ? 0 : (collection_set->garbage() * 100 / total_garbage);
  size_t collectable_garbage = collection_set->garbage() + immediate_garbage;
  size_t collectable_garbage_percent = (total_garbage == 0) ? 0 : (collectable_garbage * 100 / total_garbage);

  log_info(gc, ergo)("Collectable Garbage: " SIZE_FORMAT "%s (" SIZE_FORMAT "%%), "
                     "Immediate: " SIZE_FORMAT "%s (" SIZE_FORMAT "%%), "
                     "CSet: " SIZE_FORMAT "%s (" SIZE_FORMAT "%%)",

                     byte_size_in_proper_unit(collectable_garbage),
                     proper_unit_for_byte_size(collectable_garbage),
                     collectable_garbage_percent,

                     byte_size_in_proper_unit(immediate_garbage),
                     proper_unit_for_byte_size(immediate_garbage),
                     immediate_percent,

                     byte_size_in_proper_unit(collection_set->garbage()),
                     proper_unit_for_byte_size(collection_set->garbage()),
                     cset_percent);

  size_t bytes_evacuated = collection_set->get_bytes_reserved_for_evacuation();
  log_info(gc, ergo)("Total Evacuation: " SIZE_FORMAT "%s",
                     byte_size_in_proper_unit(bytes_evacuated), proper_unit_for_byte_size(bytes_evacuated));
}
239 
240 void ShenandoahHeuristics::record_cycle_start() {
241   _cycle_start = os::elapsedTime();
242 }
243 
244 void ShenandoahHeuristics::record_cycle_end() {
245   _last_cycle_end = os::elapsedTime();
246 }
247 
248 bool ShenandoahHeuristics::should_start_gc() {
249   // Perform GC to cleanup metaspace
250   if (has_metaspace_oom()) {
251     // Some of vmTestbase/metaspace tests depend on following line to count GC cycles
252     log_info(gc)("Trigger: %s", GCCause::to_string(GCCause::_metadata_GC_threshold));
253     return true;
254   }
255 
256   if (_guaranteed_gc_interval > 0) {
257     double last_time_ms = (os::elapsedTime() - _last_cycle_end) * 1000;
258     if (last_time_ms > _guaranteed_gc_interval) {
259       log_info(gc)("Trigger (%s): Time since last GC (%.0f ms) is larger than guaranteed interval (" UINTX_FORMAT " ms)",
260                    _generation->name(), last_time_ms, _guaranteed_gc_interval);
261       return true;
262     }
263   }
264 
265   return false;
266 }
267 
// Decide whether the next failed concurrent cycle may degenerate (stop-the-world
// continuation) rather than upgrade to a full GC. Degeneration is allowed only
// while the consecutive-degeneration count stays within ShenandoahFullGCThreshold.
bool ShenandoahHeuristics::should_degenerate_cycle() {
  return _degenerated_cycles_in_a_row <= ShenandoahFullGCThreshold;
}
271 
272 void ShenandoahHeuristics::adjust_penalty(intx step) {
273   assert(0 <= _gc_time_penalties && _gc_time_penalties <= 100,
274          "In range before adjustment: " INTX_FORMAT, _gc_time_penalties);
275 
276   intx new_val = _gc_time_penalties + step;
277   if (new_val < 0) {
278     new_val = 0;
279   }
280   if (new_val > 100) {
281     new_val = 100;
282   }
283   _gc_time_penalties = new_val;
284 
285   assert(0 <= _gc_time_penalties && _gc_time_penalties <= 100,
286          "In range after adjustment: " INTX_FORMAT, _gc_time_penalties);
287 }
288 
289 void ShenandoahHeuristics::record_success_concurrent(bool abbreviated) {
290   _degenerated_cycles_in_a_row = 0;
291   _successful_cycles_in_a_row++;
292 
293   if (!(abbreviated && ShenandoahAdaptiveIgnoreShortCycles)) {
294     _gc_time_history->add(time_since_last_gc());
295     _gc_times_learned++;
296   }
297 
298   adjust_penalty(Concurrent_Adjust);
299 }
300 
301 void ShenandoahHeuristics::record_success_degenerated() {
302   _degenerated_cycles_in_a_row++;
303   _successful_cycles_in_a_row = 0;
304 
305   adjust_penalty(Degenerated_Penalty);
306 }
307 
308 void ShenandoahHeuristics::record_success_full() {
309   _degenerated_cycles_in_a_row = 0;
310   _successful_cycles_in_a_row++;
311 
312   adjust_penalty(Full_Penalty);
313 }
314 
// Hook invoked when a GC is forced by allocation failure. The base heuristics
// keep no extra state for this; subclasses may override to react.
void ShenandoahHeuristics::record_allocation_failure_gc() {
  // Do nothing.
}
318 
// Hook invoked when a GC is explicitly requested (e.g. System.gc()).
void ShenandoahHeuristics::record_requested_gc() {
  // Assume users call System.gc() when external state changes significantly,
  // which forces us to re-learn the GC timings and allocation rates.
  _gc_times_learned = 0;
}
324 
325 bool ShenandoahHeuristics::can_unload_classes() {
326   if (!ClassUnloading) return false;
327   return true;
328 }
329 
330 bool ShenandoahHeuristics::can_unload_classes_normal() {
331   if (!can_unload_classes()) return false;
332   if (has_metaspace_oom()) return true;
333   if (!ClassUnloadingWithConcurrentMark) return false;
334   if (ShenandoahUnloadClassesFrequency == 0) return false;
335   return true;
336 }
337 
338 bool ShenandoahHeuristics::should_unload_classes() {
339   if (!can_unload_classes_normal()) return false;
340   if (has_metaspace_oom()) return true;
341   size_t cycle = ShenandoahHeap::heap()->shenandoah_policy()->cycle_counter();
342   // Unload classes every Nth GC cycle.
343   // This should not happen in the same cycle as process_references to amortize costs.
344   // Offsetting by one is enough to break the rendezvous when periods are equal.
345   // When periods are not equal, offsetting by one is just as good as any other guess.
346   return (cycle + 1) % ShenandoahUnloadClassesFrequency == 0;
347 }
348 
// One-time setup hook called after heap initialization; subclasses override
// to seed their internal state.
void ShenandoahHeuristics::initialize() {
  // Nothing to do by default.
}
352 
353 double ShenandoahHeuristics::time_since_last_gc() const {
354   return os::elapsedTime() - _cycle_start;
355 }
356 
357 bool ShenandoahHeuristics::in_generation(ShenandoahHeapRegion* region) {
358   return ((_generation->generation_mode() == GLOBAL)
359           || (_generation->generation_mode() == YOUNG && region->affiliation() == YOUNG_GENERATION)
360           || (_generation->generation_mode() == OLD && region->affiliation() == OLD_GENERATION));
361 }
362 
363 size_t ShenandoahHeuristics::min_free_threshold() {
364   size_t min_free_threshold =
365       _generation->generation_mode() == GenerationMode::OLD
366           ? ShenandoahOldMinFreeThreshold
367           : ShenandoahMinFreeThreshold;
368   return _generation->soft_max_capacity() / 100 * min_free_threshold;
369 }
370 
371 void ShenandoahHeuristics::save_last_live_memory(size_t live_memory) {
372   _live_memory_penultimate_cycle = _live_memory_last_cycle;
373   _live_memory_last_cycle = live_memory;
374 }
375 
// Live bytes observed during the most recent collection-set selection.
size_t ShenandoahHeuristics::get_last_live_memory() {
  return _live_memory_last_cycle;
}
379 
// Live bytes observed during the collection-set selection before last.
size_t ShenandoahHeuristics::get_penultimate_live_memory() {
  return _live_memory_penultimate_cycle;
}