1 /*
  2  * Copyright (c) 2021, Amazon.com, Inc. or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 
 27 #include "gc/shenandoah/heuristics/shenandoahOldHeuristics.hpp"
 28 #include "gc/shenandoah/shenandoahCollectionSet.hpp"
 29 #include "gc/shenandoah/shenandoahHeap.hpp"
 30 #include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
 31 #include "gc/shenandoah/shenandoahOldGeneration.hpp"
 32 #include "gc/shenandoah/shenandoahYoungGeneration.hpp"
 33 #include "utilities/quickSort.hpp"
 34 
// Sentinel index meaning "no candidate recorded"; -1U wraps to the maximum uint value.
uint ShenandoahOldHeuristics::NOT_FOUND = -1U;
 36 
// Heuristics for the old generation. Decisions about *when* to trigger an old
// collection are delegated to 'trigger_heuristic'; this class itself manages the
// list of old regions selected as candidates for mixed collections.
ShenandoahOldHeuristics::ShenandoahOldHeuristics(ShenandoahOldGeneration* generation, ShenandoahHeuristics* trigger_heuristic) :
  ShenandoahHeuristics(generation),
  _first_pinned_candidate(NOT_FOUND),   // no pinned candidate seen yet
  _last_old_collection_candidate(0),    // exclusive end of mixed-collection candidates in _region_data
  _next_old_collection_candidate(0),    // index of next candidate to hand out
  _last_old_region(0),                  // exclusive end of all recorded old candidate regions
  _trigger_heuristic(trigger_heuristic),
  _promotion_failed(false),
  _old_generation(generation)
{
  assert(_generation->generation_mode() == OLD, "This service only available for old-gc heuristics");
}
 49 
// Add old-gen candidate regions (chosen earlier by prepare_for_old_collections()) to
// 'collection_set' for a mixed collection, stopping when the old evacuation budget is
// exhausted. Pinned candidates are skipped and afterwards compacted to the front of
// the candidate list for reconsideration in a later cycle. Returns true iff at least
// one old region was added.
bool ShenandoahOldHeuristics::prime_collection_set(ShenandoahCollectionSet* collection_set) {
  if (unprocessed_old_collection_candidates() == 0) {
    // No leftover candidates from the most recent old marking; nothing to prime.
    return false;
  }

  // Reset per-invocation pinned-region tracking; next_old_collection_candidate()
  // records the index of the first pinned candidate it has to skip.
  _first_pinned_candidate = NOT_FOUND;

  ShenandoahHeap* heap = ShenandoahHeap::heap();
  uint included_old_regions = 0;
  size_t evacuated_old_bytes = 0;
  size_t collected_old_bytes = 0;

  // If a region is put into the collection set, then this region's free (not yet used) bytes are no longer
  // "available" to hold the results of other evacuations.  This may cause a decrease in the remaining amount
  // of memory that can still be evacuated.  We address this by reducing the evacuation budget by the amount
  // of live memory in that region and by the amount of unallocated memory in that region if the evacuation
  // budget is constrained by availability of free memory.
  size_t old_evacuation_budget = (size_t) ((double) heap->get_old_evac_reserve() / ShenandoahEvacWaste);
  size_t remaining_old_evacuation_budget = old_evacuation_budget;
  size_t lost_evacuation_capacity = 0;
  log_info(gc)("Choose old regions for mixed collection: old evacuation budget: " SIZE_FORMAT "%s, candidates: %u",
               byte_size_in_proper_unit(old_evacuation_budget), proper_unit_for_byte_size(old_evacuation_budget),
               unprocessed_old_collection_candidates());

  // The number of old-gen regions that were selected as candidates for collection at the end of the most recent old-gen
  // concurrent marking phase and have not yet been collected is represented by unprocessed_old_collection_candidates()
  while (unprocessed_old_collection_candidates() > 0) {
    // Old collection candidates are sorted in order of decreasing garbage contained therein.
    ShenandoahHeapRegion* r = next_old_collection_candidate();
    if (r == nullptr) {
      // Only pinned candidates remain; they cannot be evacuated in this cycle.
      break;
    }

    // If we choose region r to be collected, then we need to decrease the capacity to hold other evacuations by
    // the size of r's free memory.

    // It's probably overkill to compensate with lost_evacuation_capacity.  But it's the safe thing to do and
    //  has minimal impact on content of primed collection set.
    if (r->get_live_data_bytes() + lost_evacuation_capacity <= remaining_old_evacuation_budget) {
      // Decrement remaining evacuation budget by bytes that will be copied.
      lost_evacuation_capacity += r->free();
      remaining_old_evacuation_budget -= r->get_live_data_bytes();
      collection_set->add_region(r);
      included_old_regions++;
      evacuated_old_bytes += r->get_live_data_bytes();
      collected_old_bytes += r->garbage();
      consume_old_collection_candidate();
    } else {
      // This candidate's live data does not fit within the remaining budget; stop priming.
      break;
    }
  }

  if (_first_pinned_candidate != NOT_FOUND) {
    // Need to deal with pinned regions
    slide_pinned_regions_to_front();
  }

  if (included_old_regions > 0) {
    log_info(gc)("Old-gen piggyback evac (" UINT32_FORMAT " regions, evacuating " SIZE_FORMAT "%s, reclaiming: " SIZE_FORMAT "%s)",
                 included_old_regions,
                 byte_size_in_proper_unit(evacuated_old_bytes), proper_unit_for_byte_size(evacuated_old_bytes),
                 byte_size_in_proper_unit(collected_old_bytes), proper_unit_for_byte_size(collected_old_bytes));
  }

  if (unprocessed_old_collection_candidates() == 0) {
    // Every candidate has now been collected; this old cycle's mixed collections are done.
    _old_generation->transition_to(ShenandoahOldGeneration::IDLE);
  }

  return (included_old_regions > 0);
}
120 
// Candidates that were pinned during prime_collection_set() could not be added to the
// collection set. Compact the entries for still-pinned regions toward the front of the
// candidate array (clobbering slots whose regions are already in the cset), then reset
// _next_old_collection_candidate so those pinned regions are reconsidered by a later
// mixed collection, by which time they may have become unpinned.
void ShenandoahOldHeuristics::slide_pinned_regions_to_front() {
  // Find the first unpinned region to the left of the next region that
  // will be added to the collection set. These regions will have been
  // added to the cset, so we can use them to hold pointers to regions
  // that were pinned when the cset was chosen.
  // [ r p r p p p r r ]
  //     ^         ^ ^
  //     |         | | pointer to next region to add to a mixed collection is here.
  //     |         | first r to the left should be in the collection set now.
  //     | first pinned region, we don't need to look past this
  uint write_index = NOT_FOUND;
  for (uint search = _next_old_collection_candidate - 1; search > _first_pinned_candidate; --search) {
    ShenandoahHeapRegion* region = _region_data[search]._region;
    if (!region->is_pinned()) {
      write_index = search;
      assert(region->is_cset(), "Expected unpinned region to be added to the collection set.");
      break;
    }
  }

  // If we could not find an unpinned region, it means there are no slots available
  // to move up the pinned regions. In this case, we just reset our next index in the
  // hopes that some of these regions will become unpinned before the next mixed
  // collection. We may want to bailout of here instead, as it should be quite
  // rare to have so many pinned regions and may indicate something is wrong.
  if (write_index == NOT_FOUND) {
    assert(_first_pinned_candidate != NOT_FOUND, "Should only be here if there are pinned regions.");
    _next_old_collection_candidate = _first_pinned_candidate;
    return;
  }

  // Find pinned regions to the left and move their pointer into a slot
  // that was pointing at a region that has been added to the cset (or was pointing
  // to a pinned region that we've already moved up). We are done when the leftmost
  // pinned region has been slid up.
  // [ r p r x p p p r ]
  //         ^       ^
  //         |       | next region for mixed collections
  //         | Write pointer is here. We know this region is already in the cset
  //         | so we can clobber it with the next pinned region we find.
  // Signed loop variable: _first_pinned_candidate may be 0, so 'search' must be able to go negative.
  for (int32_t search = write_index - 1; search >= (int32_t)_first_pinned_candidate; --search) {
    RegionData& skipped = _region_data[search];
    if (skipped._region->is_pinned()) {
      RegionData& available_slot = _region_data[write_index];
      available_slot._region = skipped._region;
      available_slot._garbage = skipped._garbage;
      --write_index;
    }
  }

  // Update to read from the leftmost pinned region. Plus one here because we decremented
  // the write index to hold the next found pinned region. We are just moving it back now
  // to point to the first pinned region.
  _next_old_collection_candidate = write_index + 1;
}
176 
// Both arguments are don't cares for old-gen collections: callers must pass null.
// Old-gen does not build a collection set of its own here; it records the candidate
// regions that later young-gen mixed collections will evacuate.
void ShenandoahOldHeuristics::choose_collection_set(ShenandoahCollectionSet* collection_set,
                                                    ShenandoahOldHeuristics* old_heuristics) {
  assert((collection_set == nullptr) && (old_heuristics == nullptr),
         "Expect null arguments in ShenandoahOldHeuristics::choose_collection_set()");
  // Old-gen doesn't actually choose a collection set to be evacuated by its own gang of worker tasks.
  // Instead, it computes the set of regions to be evacuated by subsequent young-gen evacuation passes.
  prepare_for_old_collections();
}
186 
// Scan all old-gen regions after concurrent old marking. Regions with no live data are
// reclaimed immediately ("immediate garbage"); the rest are recorded in _region_data and
// sorted by decreasing garbage. Regions whose garbage meets ShenandoahOldGarbageThreshold
// become mixed-collection candidates (indices [0, _last_old_collection_candidate));
// all recorded regions up to _last_old_region remain eligible for coalesce-and-fill.
void ShenandoahOldHeuristics::prepare_for_old_collections() {
  assert(_generation->generation_mode() == OLD, "This service only available for old-gc heuristics");
  ShenandoahHeap* heap = ShenandoahHeap::heap();

  size_t cand_idx = 0;           // number of candidate entries filled in _region_data
  size_t total_garbage = 0;
  size_t num_regions = heap->num_regions();
  size_t immediate_garbage = 0;  // bytes reclaimed without evacuation
  size_t immediate_regions = 0;  // regions reclaimed without evacuation

  RegionData* candidates = _region_data;
  for (size_t i = 0; i < num_regions; i++) {
    ShenandoahHeapRegion* region = heap->get_region(i);
    if (!in_generation(region)) {
      // Skip regions not affiliated with old-gen.
      continue;
    }

    size_t garbage = region->garbage();
    total_garbage += garbage;

    if (region->is_regular() || region->is_pinned()) {
      if (!region->has_live()) {
        // Completely dead regular region: reclaim it right now, no evacuation needed.
        assert(!region->is_pinned(), "Pinned region should have live (pinned) objects.");
        region->make_trash_immediate();
        immediate_regions++;
        immediate_garbage += garbage;
      } else {
        // Live region: mark it for (preemptible) coalesce-and-fill and record it as a candidate.
        region->begin_preemptible_coalesce_and_fill();
        candidates[cand_idx]._region = region;
        candidates[cand_idx]._garbage = garbage;
        cand_idx++;
      }
    } else if (region->is_humongous_start()) {
      if (!region->has_live()) {
        // The humongous object is dead, we can just return this region and the continuations
        // immediately to the freeset - no evacuations are necessary here. The continuations
        // will be made into trash by this method, so they'll be skipped by the 'is_regular'
        // check above, but we still need to count the start region.
        immediate_regions++;
        immediate_garbage += garbage;
        size_t region_count = heap->trash_humongous_region_at(region);
        log_debug(gc)("Trashed " SIZE_FORMAT " regions for humongous object.", region_count);
      }
    } else if (region->is_trash()) {
      // Count humongous objects made into trash here.
      immediate_regions++;
      immediate_garbage += garbage;
    }
  }

  // TODO: Consider not running mixed collects if we recovered some threshold percentage of memory from immediate garbage.
  // This would be similar to young and global collections shortcutting evacuation, though we'd probably want a separate
  // threshold for the old generation.

  // Prioritize regions to select garbage-first regions
  QuickSort::sort<RegionData>(candidates, cand_idx, compare_by_garbage, false);

  // Any old-gen region that contains (ShenandoahOldGarbageThreshold (default value 25))% garbage or more is to
  // be evacuated.
  //
  // TODO: allow ShenandoahOldGarbageThreshold to be determined adaptively, by heuristics.


  const size_t garbage_threshold = ShenandoahHeapRegion::region_size_bytes() * ShenandoahOldGarbageThreshold / 100;
  size_t candidates_garbage = 0;
  _last_old_region = (uint)cand_idx;
  _last_old_collection_candidate = (uint)cand_idx;
  _next_old_collection_candidate = 0;

  for (size_t i = 0; i < cand_idx; i++) {
    if (candidates[i]._garbage < garbage_threshold) {
      // Candidates are sorted in decreasing order of garbage, so no regions after this will be above the threshold
      _last_old_collection_candidate = (uint)i;
      break;
    }
    candidates_garbage += candidates[i]._garbage;
  }

  // Note that we do not coalesce and fill occupied humongous regions
  // HR: humongous regions, RR: regular regions, CF: coalesce and fill regions
  size_t collectable_garbage = immediate_garbage + candidates_garbage;
  log_info(gc)("Old-Gen Collectable Garbage: " SIZE_FORMAT "%s over " UINT32_FORMAT " regions, "
               "Old-Gen Immediate Garbage: " SIZE_FORMAT "%s over " SIZE_FORMAT " regions.",
               byte_size_in_proper_unit(collectable_garbage), proper_unit_for_byte_size(collectable_garbage), _last_old_collection_candidate,
               byte_size_in_proper_unit(immediate_garbage), proper_unit_for_byte_size(immediate_garbage), immediate_regions);

  // Advertise whether mixed collections are required to clean up the chosen candidates.
  if (unprocessed_old_collection_candidates() == 0) {
    _old_generation->transition_to(ShenandoahOldGeneration::IDLE);
  } else {
    _old_generation->transition_to(ShenandoahOldGeneration::WAITING);
  }
}
279 
// Exclusive upper bound of the mixed-collection candidate entries in _region_data.
uint ShenandoahOldHeuristics::last_old_collection_candidate_index() {
  return _last_old_collection_candidate;
}

// Number of mixed-collection candidates that have not yet been consumed
// (added to a collection set or skipped as pinned).
uint ShenandoahOldHeuristics::unprocessed_old_collection_candidates() {
  return _last_old_collection_candidate - _next_old_collection_candidate;
}
287 
288 ShenandoahHeapRegion* ShenandoahOldHeuristics::next_old_collection_candidate() {
289   while (_next_old_collection_candidate < _last_old_collection_candidate) {
290     ShenandoahHeapRegion* next = _region_data[_next_old_collection_candidate]._region;
291     if (!next->is_pinned()) {
292       return next;
293     } else {
294       assert(next->is_pinned(), "sanity");
295       if (_first_pinned_candidate == NOT_FOUND) {
296         _first_pinned_candidate = _next_old_collection_candidate;
297       }
298     }
299 
300     _next_old_collection_candidate++;
301   }
302   return nullptr;
303 }
304 
// Mark the candidate most recently returned by next_old_collection_candidate()
// as taken (it has been added to a collection set).
void ShenandoahOldHeuristics::consume_old_collection_candidate() {
  _next_old_collection_candidate++;
}

// Exclusive upper bound of all recorded old candidate regions in _region_data.
uint ShenandoahOldHeuristics::last_old_region_index() const {
  return _last_old_region;
}
312 
313 unsigned int ShenandoahOldHeuristics::get_coalesce_and_fill_candidates(ShenandoahHeapRegion** buffer) {
314   uint end = _last_old_region;
315   uint index = _next_old_collection_candidate;
316   while (index < end) {
317     *buffer++ = _region_data[index++]._region;
318   }
319   return _last_old_region - _next_old_collection_candidate;
320 }
321 
// Discard all recorded candidate regions by resetting the candidate indices.
// After this, unprocessed_old_collection_candidates() is 0 and there are no
// coalesce-and-fill candidates.
void ShenandoahOldHeuristics::abandon_collection_candidates() {
  _last_old_collection_candidate = 0;
  _next_old_collection_candidate = 0;
  _last_old_region = 0;
}
327 
328 void ShenandoahOldHeuristics::handle_promotion_failure() {
329   if (!_promotion_failed) {
330     if (ShenandoahHeap::heap()->generation_sizer()->transfer_capacity(_old_generation)) {
331       log_info(gc)("Increased size of old generation due to promotion failure.");
332     }
333     // TODO: Increase tenuring threshold to push back on promotions.
334   }
335   _promotion_failed = true;
336 }
337 
// Start-of-cycle bookkeeping: clear the promotion-failure flag for the new cycle,
// then notify the trigger heuristic.
void ShenandoahOldHeuristics::record_cycle_start() {
  _promotion_failed = false;
  _trigger_heuristic->record_cycle_start();
}

// End-of-cycle bookkeeping is delegated entirely to the trigger heuristic.
void ShenandoahOldHeuristics::record_cycle_end() {
  _trigger_heuristic->record_cycle_end();
}
346 
// Decide whether a new old-gen collection cycle should begin. Returns false while the
// previous cycle's candidates are still being drained, true on promotion failure, and
// otherwise defers to the configured trigger heuristic.
bool ShenandoahOldHeuristics::should_start_gc() {
  // Cannot start a new old-gen GC until previous one has finished.
  //
  // Future refinement: under certain circumstances, we might be more sophisticated about this choice.
  // For example, we could choose to abandon the previous old collection before it has completed evacuations.
  if (unprocessed_old_collection_candidates() > 0) {
    return false;
  }

  // If there's been a promotion failure (and we don't have regions already scheduled for evacuation),
  // start a new old generation collection.
  if (_promotion_failed) {
    log_info(gc)("Trigger: Promotion Failure");
    return true;
  }

  // Otherwise, defer to configured heuristic for gc trigger.
  return _trigger_heuristic->should_start_gc();
}
366 
// The methods below are straight delegations: policy state (cycle learning, GC
// triggers, class-unloading decisions) is owned by the configured trigger heuristic.

bool ShenandoahOldHeuristics::should_degenerate_cycle() {
  return _trigger_heuristic->should_degenerate_cycle();
}

void ShenandoahOldHeuristics::record_success_concurrent(bool abbreviated) {
  _trigger_heuristic->record_success_concurrent(abbreviated);
}

void ShenandoahOldHeuristics::record_success_degenerated() {
  _trigger_heuristic->record_success_degenerated();
}

void ShenandoahOldHeuristics::record_success_full() {
  _trigger_heuristic->record_success_full();
}

void ShenandoahOldHeuristics::record_allocation_failure_gc() {
  _trigger_heuristic->record_allocation_failure_gc();
}

void ShenandoahOldHeuristics::record_requested_gc() {
  _trigger_heuristic->record_requested_gc();
}

void ShenandoahOldHeuristics::reset_gc_learning() {
  _trigger_heuristic->reset_gc_learning();
}

bool ShenandoahOldHeuristics::can_unload_classes() {
  return _trigger_heuristic->can_unload_classes();
}

bool ShenandoahOldHeuristics::can_unload_classes_normal() {
  return _trigger_heuristic->can_unload_classes_normal();
}

bool ShenandoahOldHeuristics::should_unload_classes() {
  return _trigger_heuristic->should_unload_classes();
}
406 
// Composite display name: trigger heuristic's name suffixed with " (OLD)".
// NOTE(review): the result points to a function-local static buffer that is
// rewritten on every call — presumably only used for serialized logging; verify
// no concurrent callers rely on the returned string remaining stable.
const char* ShenandoahOldHeuristics::name() {
  static char name[128];
  jio_snprintf(name, sizeof(name), "%s (OLD)", _trigger_heuristic->name());
  return name;
}
412 
// Not a diagnostic-only heuristic.
bool ShenandoahOldHeuristics::is_diagnostic() {
  return false;
}

// This heuristic is flagged as experimental.
bool ShenandoahOldHeuristics::is_experimental() {
  return true;
}
420 
// Old-gen candidate selection is done in prepare_for_old_collections(), not through
// the generic region-data path, so this override must never be invoked.
void ShenandoahOldHeuristics::choose_collection_set_from_regiondata(ShenandoahCollectionSet* set,
                                                                    ShenandoahHeuristics::RegionData* data,
                                                                    size_t data_size, size_t free) {
  ShouldNotReachHere();
}
426 
427