/*
 * Copyright (c) 2018, 2020, Red Hat, Inc. All rights reserved.
 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/shared/gcCause.hpp"
#include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
#include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
#include "gc/shenandoah/heuristics/shenandoahHeuristics.hpp"
#include "logging/log.hpp"
#include "logging/logTag.hpp"
#include "runtime/globals_extension.hpp"
#include "utilities/quickSort.hpp"

// Sort by decreasing garbage, so that regions with the most garbage come first.
int ShenandoahHeuristics::compare_by_garbage(RegionData a, RegionData b) {
  if (a.get_garbage() > b.get_garbage()) {
    return -1;
  } else if (a.get_garbage() < b.get_garbage()) {
    return 1;
  } else {
    return 0;
  }
}

ShenandoahHeuristics::ShenandoahHeuristics(ShenandoahSpaceInfo* space_info) :
  _start_gc_is_pending(false),
  _declined_trigger_count(0),
  _most_recent_declined_trigger_count(0),
  _space_info(space_info),
  _region_data(nullptr),
  _guaranteed_gc_interval(0),
  _cycle_start(os::elapsedTime()),
  _last_cycle_end(0),
  _gc_times_learned(0),
  _gc_time_penalties(0),
  _gc_cycle_time_history(new TruncatedSeq(Moving_Average_Samples, ShenandoahAdaptiveDecayFactor)),
  _metaspace_oom()
{
  size_t num_regions = ShenandoahHeap::heap()->num_regions();
  assert(num_regions > 0, "Sanity");

  _region_data = NEW_C_HEAP_ARRAY(RegionData, num_regions, mtGC);
  for (size_t i = 0; i < num_regions; i++) {
    _region_data[i].clear();
  }
}

ShenandoahHeuristics::~ShenandoahHeuristics() {
  // Free the array with the same element type used for the allocation above.
  FREE_C_HEAP_ARRAY(RegionData, _region_data);
}

void ShenandoahHeuristics::choose_collection_set(ShenandoahCollectionSet* collection_set) {
  assert(collection_set->is_empty(), "Must be empty");

  ShenandoahHeap* heap = ShenandoahHeap::heap();

  // Check all pinned regions have updated status before choosing the collection set.
  heap->assert_pinned_region_status();

  // Step 1. Build up the region candidates we care about, rejecting losers and accepting winners right away.
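  // Regions are classified in a single pass: empty regions are counted as free; regular regions with no
  // live data are trashed immediately; regular regions with live data become candidates for the collection
  // set; humongous start regions with no live data are reclaimed in place; and regions that are already
  // trash are counted as immediate garbage.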

  size_t num_regions = heap->num_regions();

  RegionData* candidates = _region_data;

  size_t cand_idx = 0;

  size_t total_garbage = 0;

  size_t immediate_garbage = 0;
  size_t immediate_regions = 0;

  size_t free = 0;
  size_t free_regions = 0;

  ShenandoahMarkingContext* const ctx = heap->complete_marking_context();

  for (size_t i = 0; i < num_regions; i++) {
    ShenandoahHeapRegion* region = heap->get_region(i);

    size_t garbage = region->garbage();
    total_garbage += garbage;

    if (region->is_empty()) {
      free_regions++;
      free += ShenandoahHeapRegion::region_size_bytes();
    } else if (region->is_regular()) {
      if (!region->has_live()) {
        // We can recycle it right away and put it in the free set.
        immediate_regions++;
        immediate_garbage += garbage;
        region->make_trash_immediate();
      } else {
        // This is our candidate for later consideration.
        candidates[cand_idx].set_region_and_garbage(region, garbage);
        cand_idx++;
      }
    } else if (region->is_humongous_start()) {
      // Reclaim humongous regions here, and count them as immediate garbage.
#ifdef ASSERT
      bool reg_live = region->has_live();
      bool bm_live = ctx->is_marked(cast_to_oop(region->bottom()));
      assert(reg_live == bm_live,
             "Humongous liveness and marks should agree. Region live: %s; Bitmap live: %s; Region Live Words: " SIZE_FORMAT,
             BOOL_TO_STR(reg_live), BOOL_TO_STR(bm_live), region->get_live_data_words());
#endif
      if (!region->has_live()) {
        heap->trash_humongous_region_at(region);

        // Count only the start region. Continuation regions are counted on the "trash" path.
        immediate_regions++;
        immediate_garbage += garbage;
      }
    } else if (region->is_trash()) {
      // Count regions of the just-trashed collection set (e.g. during coalesced CM-with-UR).
      immediate_regions++;
      immediate_garbage += garbage;
    }
  }

  // Step 2. Look back at garbage statistics, and decide if we want to collect anything,
  // given the amount of immediately reclaimable garbage. If we do, figure out the collection set.

  assert (immediate_garbage <= total_garbage,
          "Cannot have more immediate garbage than total garbage: " SIZE_FORMAT "%s vs " SIZE_FORMAT "%s",
          byte_size_in_proper_unit(immediate_garbage), proper_unit_for_byte_size(immediate_garbage),
          byte_size_in_proper_unit(total_garbage), proper_unit_for_byte_size(total_garbage));

  size_t immediate_percent = (total_garbage == 0) ? 0 : (immediate_garbage * 100 / total_garbage);

  if (immediate_percent <= ShenandoahImmediateThreshold) {
    choose_collection_set_from_regiondata(collection_set, candidates, cand_idx, immediate_garbage + free);
  }

  size_t cset_percent = (total_garbage == 0) ? 0 : (collection_set->garbage() * 100 / total_garbage);
  size_t collectable_garbage = collection_set->garbage() + immediate_garbage;
  size_t collectable_garbage_percent = (total_garbage == 0) ? 0 : (collectable_garbage * 100 / total_garbage);

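  // Report the garbage accounting for this cycle: total collectable garbage, the portion reclaimed
  // immediately, and the portion deferred to the chosen collection set.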
  log_info(gc, ergo)("Collectable Garbage: " SIZE_FORMAT "%s (" SIZE_FORMAT "%%), "
                     "Immediate: " SIZE_FORMAT "%s (" SIZE_FORMAT "%%), " SIZE_FORMAT " regions, "
                     "CSet: " SIZE_FORMAT "%s (" SIZE_FORMAT "%%), " SIZE_FORMAT " regions",

                     byte_size_in_proper_unit(collectable_garbage),
                     proper_unit_for_byte_size(collectable_garbage),
                     collectable_garbage_percent,

                     byte_size_in_proper_unit(immediate_garbage),
                     proper_unit_for_byte_size(immediate_garbage),
                     immediate_percent,
                     immediate_regions,

                     byte_size_in_proper_unit(collection_set->garbage()),
                     proper_unit_for_byte_size(collection_set->garbage()),
                     cset_percent,
                     collection_set->count());
}

void ShenandoahHeuristics::record_cycle_start() {
  _cycle_start = os::elapsedTime();
}

void ShenandoahHeuristics::record_cycle_end() {
  _last_cycle_end = os::elapsedTime();
}

bool ShenandoahHeuristics::should_start_gc() {
  if (_start_gc_is_pending) {
    log_trigger("GC start is already pending");
    return true;
  }
  // Perform a GC to clean up metaspace.
  if (has_metaspace_oom()) {
    // Some vmTestbase/metaspace tests depend on the following line to count GC cycles.
    log_trigger("%s", GCCause::to_string(GCCause::_metadata_GC_threshold));
    accept_trigger();
    return true;
  }

  if (_guaranteed_gc_interval > 0) {
    double last_time_ms = (os::elapsedTime() - _last_cycle_end) * 1000;
    if (last_time_ms > _guaranteed_gc_interval) {
      log_trigger("Time since last GC (%.0f ms) is larger than guaranteed interval (" UINTX_FORMAT " ms)",
                  last_time_ms, _guaranteed_gc_interval);
      accept_trigger();
      return true;
    }
  }
  decline_trigger();
  return false;
}

// Keep degenerating until the number of consecutive degenerated GCs exceeds ShenandoahFullGCThreshold.
bool ShenandoahHeuristics::should_degenerate_cycle() {
  return ShenandoahHeap::heap()->shenandoah_policy()->consecutive_degenerated_gc_count() <= ShenandoahFullGCThreshold;
}

void ShenandoahHeuristics::adjust_penalty(intx step) {
  assert(0 <= _gc_time_penalties && _gc_time_penalties <= 100,
         "In range before adjustment: " INTX_FORMAT, _gc_time_penalties);

  if ((_most_recent_declined_trigger_count <= Penalty_Free_Declinations) && (step > 0)) {
    // Don't penalize the heuristics when they are not responsible for a negative outcome. Allow up to
    // Penalty_Free_Declinations declined triggers after the previous GC for self-calibration without penalty.
    step = 0;
  }

  intx new_val = _gc_time_penalties + step;
  if (new_val < 0) {
    new_val = 0;
  }
  if (new_val > 100) {
    new_val = 100;
  }
  _gc_time_penalties = new_val;

  assert(0 <= _gc_time_penalties && _gc_time_penalties <= 100,
         "In range after adjustment: " INTX_FORMAT, _gc_time_penalties);
}

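// Log the reason a GC cycle was triggered. In generational mode, the message also names the
// generation (via _space_info) that the trigger applies to.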
void ShenandoahHeuristics::log_trigger(const char* fmt, ...) {
  LogTarget(Info, gc) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print_raw("Trigger", 7);
    if (ShenandoahHeap::heap()->mode()->is_generational()) {
      ls.print(" (%s)", _space_info->name());
    }
    ls.print_raw(": ", 2);
    va_list va;
    va_start(va, fmt);
    ls.vprint(fmt, va);
    va_end(va);
    ls.cr();
  }
}

void ShenandoahHeuristics::record_success_concurrent() {
  _gc_cycle_time_history->add(elapsed_cycle_time());
  _gc_times_learned++;

  adjust_penalty(Concurrent_Adjust);
}

void ShenandoahHeuristics::record_success_degenerated() {
  adjust_penalty(Degenerated_Penalty);
}

void ShenandoahHeuristics::record_success_full() {
  adjust_penalty(Full_Penalty);
}

void ShenandoahHeuristics::record_allocation_failure_gc() {
  // Do nothing.
}

void ShenandoahHeuristics::record_requested_gc() {
  // Assume users call System.gc() when external state changes significantly,
  // which forces us to re-learn the GC timings and allocation rates.
  _gc_times_learned = 0;
}

bool ShenandoahHeuristics::can_unload_classes() {
  return ClassUnloading;
}

bool ShenandoahHeuristics::should_unload_classes() {
  if (!can_unload_classes()) return false;
  if (has_metaspace_oom()) return true;
  return ClassUnloadingWithConcurrentMark;
}

void ShenandoahHeuristics::initialize() {
  // Nothing to do by default.
}

double ShenandoahHeuristics::elapsed_cycle_time() const {
  return os::elapsedTime() - _cycle_start;
}