1 /*
2 * Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
3 * Copyright (c) 2013, 2019, Red Hat, Inc. All rights reserved.
4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5 *
6 * This code is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 only, as
8 * published by the Free Software Foundation.
9 *
10 * This code is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
13 * version 2 for more details (a copy is included in the LICENSE file that
14 * accompanied this code).
15 *
16 * You should have received a copy of the GNU General Public License version
17 * 2 along with this work; if not, write to the Free Software Foundation,
18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
19 *
20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
21 * or visit www.oracle.com if you need additional information or have any
22 * questions.
23 *
24 */
25
26 #include "precompiled.hpp"
27 #include "gc/shared/space.hpp"
28 #include "gc/shared/tlab_globals.hpp"
29 #include "gc/shenandoah/shenandoahHeapRegionSet.inline.hpp"
30 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
31 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
32 #include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
33 #include "jfr/jfrEvents.hpp"
34 #include "memory/allocation.hpp"
35 #include "memory/iterator.inline.hpp"
36 #include "memory/resourceArea.hpp"
37 #include "memory/universe.hpp"
38 #include "oops/oop.inline.hpp"
39 #include "runtime/atomic.hpp"
40 #include "runtime/globals_extension.hpp"
41 #include "runtime/java.hpp"
42 #include "runtime/mutexLocker.hpp"
43 #include "runtime/os.hpp"
44 #include "runtime/safepoint.hpp"
45 #include "utilities/powerOfTwo.hpp"
46
// Region geometry parameters shared by all regions. They are zero here and are
// computed exactly once during heap initialization (region size, derived
// word/byte shifts and masks, and the max TLAB size capped by region size).
size_t ShenandoahHeapRegion::RegionCount = 0;
size_t ShenandoahHeapRegion::RegionSizeBytes = 0;
size_t ShenandoahHeapRegion::RegionSizeWords = 0;
size_t ShenandoahHeapRegion::RegionSizeBytesShift = 0;
size_t ShenandoahHeapRegion::RegionSizeWordsShift = 0;
size_t ShenandoahHeapRegion::RegionSizeBytesMask = 0;
size_t ShenandoahHeapRegion::RegionSizeWordsMask = 0;
size_t ShenandoahHeapRegion::MaxTLABSizeBytes = 0;
size_t ShenandoahHeapRegion::MaxTLABSizeWords = 0;
56
// Construct a region covering [start, start + RegionSizeWords). The region starts
// empty (top == bottom) in either committed or uncommitted state.
ShenandoahHeapRegion::ShenandoahHeapRegion(HeapWord* start, size_t index, bool committed) :
  _index(index),
  _bottom(start),
  _end(start + RegionSizeWords),
  _new_top(nullptr),
  _empty_time(os::elapsedTime()),
  _state(committed ? _empty_committed : _empty_uncommitted),
  _top(start),
  _tlab_allocs(0),
  _gclab_allocs(0),
  _live_data(0),
  _critical_pins(0),
  _update_watermark(start) {

  assert(Universe::on_page_boundary(_bottom) && Universe::on_page_boundary(_end),
         "invalid space boundaries");
  // Mangle committed-but-unused memory in debug setups to help catch
  // use of uninitialized heap memory.
  if (ZapUnusedHeapArea && committed) {
    SpaceMangler::mangle_region(MemRegion(_bottom, _end));
  }
}
77
78 void ShenandoahHeapRegion::report_illegal_transition(const char *method) {
79 stringStream ss;
80 ss.print("Illegal region state transition from \"%s\", at %s\n ", region_state_to_string(_state), method);
81 print_on(&ss);
82 fatal("%s", ss.freeze());
83 }
84
// Transition this region into the regular (mutator-allocatable) state.
// Commits backing memory first if the region is currently uncommitted.
// Already-regular and pinned regions are left unchanged.
void ShenandoahHeapRegion::make_regular_allocation() {
  shenandoah_assert_heaplocked();

  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      // fallthrough: newly committed empty region becomes regular
    case _empty_committed:
      set_state(_regular);
      // fallthrough: nothing more to do once regular
    case _regular:
    case _pinned:
      return;
    default:
      report_illegal_transition("regular allocation");
  }
}
100
// Force this region into the regular state, bypassing the normal transition
// rules. Only legal during STW collections (Full/Degenerated GC) or while the
// Universe is still initializing (CDS heap loading).
void ShenandoahHeapRegion::make_regular_bypass() {
  shenandoah_assert_heaplocked();
  assert (!Universe::is_fully_initialized() ||
          ShenandoahHeap::heap()->is_full_gc_in_progress() ||
          ShenandoahHeap::heap()->is_degenerated_gc_in_progress(),
          "Only for STW GC or when Universe is initializing (CDS)");

  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      // fallthrough: all of these collapse to regular
    case _empty_committed:
    case _cset:
    case _humongous_start:
    case _humongous_cont:
      set_state(_regular);
      return;
    case _pinned_cset:
      // A pinned cset region cannot become regular while pinned; park it as pinned.
      set_state(_pinned);
      return;
    case _regular:
    case _pinned:
      return;
    default:
      report_illegal_transition("regular bypass");
  }
}
127
// Transition an empty region into the first region of a humongous allocation,
// committing memory first if needed.
void ShenandoahHeapRegion::make_humongous_start() {
  shenandoah_assert_heaplocked();
  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      // fallthrough
    case _empty_committed:
      set_state(_humongous_start);
      return;
    default:
      report_illegal_transition("humongous start allocation");
  }
}
140
// Full-GC-only variant of make_humongous_start(): compaction may re-purpose
// regions that were previously regular or humongous.
void ShenandoahHeapRegion::make_humongous_start_bypass() {
  shenandoah_assert_heaplocked();
  assert (ShenandoahHeap::heap()->is_full_gc_in_progress(), "only for full GC");

  switch (_state) {
    case _empty_committed:
    case _regular:
    case _humongous_start:
    case _humongous_cont:
      set_state(_humongous_start);
      return;
    default:
      report_illegal_transition("humongous start bypass");
  }
}
156
// Transition an empty region into a continuation region of a humongous
// allocation, committing memory first if needed.
void ShenandoahHeapRegion::make_humongous_cont() {
  shenandoah_assert_heaplocked();
  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      // fallthrough
    case _empty_committed:
      set_state(_humongous_cont);
      return;
    default:
      report_illegal_transition("humongous continuation allocation");
  }
}
169
// Full-GC-only variant of make_humongous_cont(): compaction may re-purpose
// regions that were previously regular or humongous.
void ShenandoahHeapRegion::make_humongous_cont_bypass() {
  shenandoah_assert_heaplocked();
  assert (ShenandoahHeap::heap()->is_full_gc_in_progress(), "only for full GC");

  switch (_state) {
    case _empty_committed:
    case _regular:
    case _humongous_start:
    case _humongous_cont:
      set_state(_humongous_cont);
      return;
    default:
      report_illegal_transition("humongous continuation bypass");
  }
}
185
// Transition this region into the appropriate pinned state for its current
// state. Requires at least one critical pin to be registered.
void ShenandoahHeapRegion::make_pinned() {
  shenandoah_assert_heaplocked();
  assert(pin_count() > 0, "Should have pins: " SIZE_FORMAT, pin_count());

  switch (_state) {
    case _regular:
      set_state(_pinned);
      // fallthrough: already in a pinned state below
    case _pinned_cset:
    case _pinned:
      return;
    case _humongous_start:
      set_state(_pinned_humongous_start);
      // fallthrough
    case _pinned_humongous_start:
      return;
    case _cset:
      // NOTE(review): direct assignment bypasses set_state(), so no JFR
      // state-change event is emitted for this transition — confirm intentional.
      _state = _pinned_cset;
      return;
    default:
      report_illegal_transition("pinning");
  }
}
207
// Transition this region out of its pinned state, back to the state it was
// pinned from. Requires that all critical pins have been released.
void ShenandoahHeapRegion::make_unpinned() {
  shenandoah_assert_heaplocked();
  assert(pin_count() == 0, "Should not have pins: " SIZE_FORMAT, pin_count());

  switch (_state) {
    case _pinned:
      set_state(_regular);
      return;
    case _regular:
    case _humongous_start:
      // Not pinned: nothing to undo.
      return;
    case _pinned_cset:
      set_state(_cset);
      return;
    case _pinned_humongous_start:
      set_state(_humongous_start);
      return;
    default:
      report_illegal_transition("unpinning");
  }
}
229
// Transition a regular region into the collection set.
void ShenandoahHeapRegion::make_cset() {
  shenandoah_assert_heaplocked();
  switch (_state) {
    case _regular:
      set_state(_cset);
      // fallthrough
    case _cset:
      return;
    default:
      report_illegal_transition("cset");
  }
}
241
// Mark this region as trash: its contents are dead and the region is ready
// to be recycled (see recycle()).
void ShenandoahHeapRegion::make_trash() {
  shenandoah_assert_heaplocked();
  switch (_state) {
    case _cset:
      // Reclaiming cset regions
    case _humongous_start:
    case _humongous_cont:
      // Reclaiming humongous regions
    case _regular:
      // Immediate region reclaim
      set_state(_trash);
      return;
    default:
      report_illegal_transition("trashing");
  }
}
258
// Trash a region that is known to contain no marked objects (immediate
// garbage found right after marking).
void ShenandoahHeapRegion::make_trash_immediate() {
  make_trash();

  // On this path, we know there are no marked objects in the region,
  // tell marking context about it to bypass bitmap resets.
  ShenandoahHeap::heap()->complete_marking_context()->reset_top_bitmap(this);
}
266
// Transition a trash region to empty-committed, recording the time it became
// empty (consulted later when deciding whether to uncommit idle regions).
void ShenandoahHeapRegion::make_empty() {
  shenandoah_assert_heaplocked();
  switch (_state) {
    case _trash:
      set_state(_empty_committed);
      _empty_time = os::elapsedTime();
      return;
    default:
      report_illegal_transition("emptying");
  }
}
278
279 void ShenandoahHeapRegion::make_uncommitted() {
280 shenandoah_assert_heaplocked();
281 switch (_state) {
282 case _empty_committed:
283 do_uncommit();
284 set_state(_empty_uncommitted);
285 return;
286 default:
287 report_illegal_transition("uncommiting");
288 }
289 }
290
// Full-GC-only: commit an uncommitted empty region so the compactor can use it.
void ShenandoahHeapRegion::make_committed_bypass() {
  shenandoah_assert_heaplocked();
  assert (ShenandoahHeap::heap()->is_full_gc_in_progress(), "only for full GC");

  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      set_state(_empty_committed);
      return;
    default:
      report_illegal_transition("commit bypass");
  }
}
304
305 void ShenandoahHeapRegion::reset_alloc_metadata() {
306 _tlab_allocs = 0;
307 _gclab_allocs = 0;
308 }
309
310 size_t ShenandoahHeapRegion::get_shared_allocs() const {
311 return used() - (_tlab_allocs + _gclab_allocs) * HeapWordSize;
312 }
313
314 size_t ShenandoahHeapRegion::get_tlab_allocs() const {
315 return _tlab_allocs * HeapWordSize;
316 }
317
318 size_t ShenandoahHeapRegion::get_gclab_allocs() const {
319 return _gclab_allocs * HeapWordSize;
320 }
321
322 void ShenandoahHeapRegion::set_live_data(size_t s) {
323 assert(Thread::current()->is_VM_thread(), "by VM thread");
324 _live_data = (s >> LogHeapWordSize);
325 }
326
327 void ShenandoahHeapRegion::print_on(outputStream* st) const {
328 st->print("|");
329 st->print(SIZE_FORMAT_W(5), this->_index);
330
331 switch (_state) {
332 case _empty_uncommitted:
333 st->print("|EU ");
334 break;
335 case _empty_committed:
336 st->print("|EC ");
337 break;
338 case _regular:
339 st->print("|R ");
340 break;
341 case _humongous_start:
346 break;
347 case _humongous_cont:
348 st->print("|HC ");
349 break;
350 case _cset:
351 st->print("|CS ");
352 break;
353 case _trash:
354 st->print("|TR ");
355 break;
356 case _pinned:
357 st->print("|P ");
358 break;
359 case _pinned_cset:
360 st->print("|CSP");
361 break;
362 default:
363 ShouldNotReachHere();
364 }
365
366 #define SHR_PTR_FORMAT "%12" PRIxPTR
367
368 st->print("|BTE " SHR_PTR_FORMAT ", " SHR_PTR_FORMAT ", " SHR_PTR_FORMAT,
369 p2i(bottom()), p2i(top()), p2i(end()));
370 st->print("|TAMS " SHR_PTR_FORMAT,
371 p2i(ShenandoahHeap::heap()->marking_context()->top_at_mark_start(const_cast<ShenandoahHeapRegion*>(this))));
372 st->print("|UWM " SHR_PTR_FORMAT,
373 p2i(_update_watermark));
374 st->print("|U " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(used()), proper_unit_for_byte_size(used()));
375 st->print("|T " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(get_tlab_allocs()), proper_unit_for_byte_size(get_tlab_allocs()));
376 st->print("|G " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(get_gclab_allocs()), proper_unit_for_byte_size(get_gclab_allocs()));
377 st->print("|S " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(get_shared_allocs()), proper_unit_for_byte_size(get_shared_allocs()));
378 st->print("|L " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(get_live_data_bytes()), proper_unit_for_byte_size(get_live_data_bytes()));
379 st->print("|CP " SIZE_FORMAT_W(3), pin_count());
380 st->cr();
381
382 #undef SHR_PTR_FORMAT
383 }
384
385 void ShenandoahHeapRegion::oop_iterate(OopIterateClosure* blk) {
386 if (!is_active()) return;
387 if (is_humongous()) {
388 oop_iterate_humongous(blk);
389 } else {
390 oop_iterate_objects(blk);
391 }
392 }
393
394 void ShenandoahHeapRegion::oop_iterate_objects(OopIterateClosure* blk) {
395 assert(! is_humongous(), "no humongous region here");
396 HeapWord* obj_addr = bottom();
397 HeapWord* t = top();
398 // Could call objects iterate, but this is easier.
399 while (obj_addr < t) {
400 oop obj = cast_to_oop(obj_addr);
401 obj_addr += obj->oop_iterate_size(blk);
402 }
403 }
404
405 void ShenandoahHeapRegion::oop_iterate_humongous(OopIterateClosure* blk) {
406 assert(is_humongous(), "only humongous region here");
407 // Find head.
408 ShenandoahHeapRegion* r = humongous_start_region();
409 assert(r->is_humongous_start(), "need humongous head here");
410 oop obj = cast_to_oop(r->bottom());
411 obj->oop_iterate(blk, MemRegion(bottom(), top()));
412 }
413
414 ShenandoahHeapRegion* ShenandoahHeapRegion::humongous_start_region() const {
415 ShenandoahHeap* heap = ShenandoahHeap::heap();
416 assert(is_humongous(), "Must be a part of the humongous region");
417 size_t i = index();
418 ShenandoahHeapRegion* r = const_cast<ShenandoahHeapRegion*>(this);
419 while (!r->is_humongous_start()) {
420 assert(i > 0, "Sanity");
421 i--;
422 r = heap->get_region(i);
423 assert(r->is_humongous(), "Must be a part of the humongous region");
424 }
425 assert(r->is_humongous_start(), "Must be");
426 return r;
427 }
428
// Reset a trash region to a clean, reusable state: discard contents and
// accounting, then transition _trash -> _empty_committed via make_empty().
void ShenandoahHeapRegion::recycle() {
  set_top(bottom());
  clear_live_data();

  reset_alloc_metadata();

  ShenandoahHeap::heap()->marking_context()->reset_top_at_mark_start(this);
  set_update_watermark(bottom());

  make_empty();

  // Mangle the recycled range in debug setups to catch stale-memory use.
  if (ZapUnusedHeapArea) {
    SpaceMangler::mangle_region(MemRegion(bottom(), end()));
  }
}
444
445 HeapWord* ShenandoahHeapRegion::block_start(const void* p) const {
446 assert(MemRegion(bottom(), end()).contains(p),
447 "p (" PTR_FORMAT ") not in space [" PTR_FORMAT ", " PTR_FORMAT ")",
448 p2i(p), p2i(bottom()), p2i(end()));
449 if (p >= top()) {
450 return top();
451 } else {
452 HeapWord* last = bottom();
453 HeapWord* cur = last;
454 while (cur <= p) {
455 last = cur;
456 cur += cast_to_oop(cur)->size();
457 }
458 shenandoah_assert_correct(nullptr, cast_to_oop(last));
459 return last;
463 size_t ShenandoahHeapRegion::block_size(const HeapWord* p) const {
464 assert(MemRegion(bottom(), end()).contains(p),
465 "p (" PTR_FORMAT ") not in space [" PTR_FORMAT ", " PTR_FORMAT ")",
466 p2i(p), p2i(bottom()), p2i(end()));
467 if (p < top()) {
468 return cast_to_oop(p)->size();
469 } else {
470 assert(p == top(), "just checking");
471 return pointer_delta(end(), (HeapWord*) p);
472 }
473 }
474
475 size_t ShenandoahHeapRegion::setup_sizes(size_t max_heap_size) {
476 // Absolute minimums we should not ever break.
477 static const size_t MIN_REGION_SIZE = 256*K;
478
479 if (FLAG_IS_DEFAULT(ShenandoahMinRegionSize)) {
480 FLAG_SET_DEFAULT(ShenandoahMinRegionSize, MIN_REGION_SIZE);
481 }
482
483 size_t region_size;
484 if (FLAG_IS_DEFAULT(ShenandoahRegionSize)) {
485 if (ShenandoahMinRegionSize > max_heap_size / MIN_NUM_REGIONS) {
486 err_msg message("Max heap size (" SIZE_FORMAT "%s) is too low to afford the minimum number "
487 "of regions (" SIZE_FORMAT ") of minimum region size (" SIZE_FORMAT "%s).",
488 byte_size_in_proper_unit(max_heap_size), proper_unit_for_byte_size(max_heap_size),
489 MIN_NUM_REGIONS,
490 byte_size_in_proper_unit(ShenandoahMinRegionSize), proper_unit_for_byte_size(ShenandoahMinRegionSize));
491 vm_exit_during_initialization("Invalid -XX:ShenandoahMinRegionSize option", message);
492 }
493 if (ShenandoahMinRegionSize < MIN_REGION_SIZE) {
494 err_msg message("" SIZE_FORMAT "%s should not be lower than minimum region size (" SIZE_FORMAT "%s).",
495 byte_size_in_proper_unit(ShenandoahMinRegionSize), proper_unit_for_byte_size(ShenandoahMinRegionSize),
496 byte_size_in_proper_unit(MIN_REGION_SIZE), proper_unit_for_byte_size(MIN_REGION_SIZE));
497 vm_exit_during_initialization("Invalid -XX:ShenandoahMinRegionSize option", message);
498 }
499 if (ShenandoahMinRegionSize < MinTLABSize) {
500 err_msg message("" SIZE_FORMAT "%s should not be lower than TLAB size size (" SIZE_FORMAT "%s).",
501 byte_size_in_proper_unit(ShenandoahMinRegionSize), proper_unit_for_byte_size(ShenandoahMinRegionSize),
502 byte_size_in_proper_unit(MinTLABSize), proper_unit_for_byte_size(MinTLABSize));
641 evt.set_used(used());
642 evt.set_from(_state);
643 evt.set_to(to);
644 evt.commit();
645 }
646 _state = to;
647 }
648
// Atomically register one more critical pin on this region (lock-free; may be
// called concurrently from JNI critical sections).
void ShenandoahHeapRegion::record_pin() {
  Atomic::add(&_critical_pins, (size_t)1);
}
652
// Atomically release one critical pin on this region.
void ShenandoahHeapRegion::record_unpin() {
  assert(pin_count() > 0, "Region " SIZE_FORMAT " should have non-zero pins", index());
  Atomic::sub(&_critical_pins, (size_t)1);
}
657
// Current number of critical pins on this region (atomic load; may race with
// concurrent pin/unpin).
size_t ShenandoahHeapRegion::pin_count() const {
  return Atomic::load(&_critical_pins);
}
|
1 /*
2 * Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
3 * Copyright (c) 2013, 2020, Red Hat, Inc. All rights reserved.
4 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
5 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
6 *
7 * This code is free software; you can redistribute it and/or modify it
8 * under the terms of the GNU General Public License version 2 only, as
9 * published by the Free Software Foundation.
10 *
11 * This code is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 * version 2 for more details (a copy is included in the LICENSE file that
15 * accompanied this code).
16 *
17 * You should have received a copy of the GNU General Public License version
18 * 2 along with this work; if not, write to the Free Software Foundation,
19 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
20 *
21 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
22 * or visit www.oracle.com if you need additional information or have any
23 * questions.
24 *
25 */
26
27 #include "precompiled.hpp"
28 #include "gc/shared/cardTable.hpp"
29 #include "gc/shared/space.hpp"
30 #include "gc/shared/tlab_globals.hpp"
31 #include "gc/shenandoah/shenandoahCardTable.hpp"
32 #include "gc/shenandoah/shenandoahFreeSet.hpp"
33 #include "gc/shenandoah/shenandoahHeapRegionSet.inline.hpp"
34 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
35 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
36 #include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
37 #include "gc/shenandoah/shenandoahOldGeneration.hpp"
38 #include "gc/shenandoah/shenandoahGeneration.hpp"
39 #include "gc/shenandoah/shenandoahYoungGeneration.hpp"
40 #include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
41 #include "jfr/jfrEvents.hpp"
42 #include "memory/allocation.hpp"
43 #include "memory/iterator.inline.hpp"
44 #include "memory/resourceArea.hpp"
45 #include "memory/universe.hpp"
46 #include "oops/oop.inline.hpp"
47 #include "runtime/atomic.hpp"
48 #include "runtime/globals_extension.hpp"
49 #include "runtime/java.hpp"
50 #include "runtime/mutexLocker.hpp"
51 #include "runtime/os.hpp"
52 #include "runtime/safepoint.hpp"
53 #include "utilities/powerOfTwo.hpp"
54
// Region geometry parameters shared by all regions. They are zero here and are
// computed exactly once during heap initialization.
size_t ShenandoahHeapRegion::RegionCount = 0;
size_t ShenandoahHeapRegion::RegionSizeBytes = 0;
size_t ShenandoahHeapRegion::RegionSizeWords = 0;
size_t ShenandoahHeapRegion::RegionSizeBytesShift = 0;
size_t ShenandoahHeapRegion::RegionSizeWordsShift = 0;
size_t ShenandoahHeapRegion::RegionSizeBytesMask = 0;
size_t ShenandoahHeapRegion::RegionSizeWordsMask = 0;
size_t ShenandoahHeapRegion::MaxTLABSizeBytes = 0;
size_t ShenandoahHeapRegion::MaxTLABSizeWords = 0;
64
// Construct a region covering [start, start + RegionSizeWords). The region
// starts empty (top == bottom), age 0, in either committed or uncommitted state.
ShenandoahHeapRegion::ShenandoahHeapRegion(HeapWord* start, size_t index, bool committed) :
  _index(index),
  _bottom(start),
  _end(start + RegionSizeWords),
  _new_top(nullptr),
  _empty_time(os::elapsedTime()),
  _top_before_promoted(nullptr),
  _state(committed ? _empty_committed : _empty_uncommitted),
  _top(start),
  _tlab_allocs(0),
  _gclab_allocs(0),
  _plab_allocs(0),
  _live_data(0),
  _critical_pins(0),
  _update_watermark(start),
  _age(0)
#ifdef SHENANDOAH_CENSUS_NOISE
  , _youth(0)
#endif // SHENANDOAH_CENSUS_NOISE
  {

  assert(Universe::on_page_boundary(_bottom) && Universe::on_page_boundary(_end),
         "invalid space boundaries");
  // Mangle committed-but-unused memory in debug setups to help catch
  // use of uninitialized heap memory.
  if (ZapUnusedHeapArea && committed) {
    SpaceMangler::mangle_region(MemRegion(_bottom, _end));
  }
}
92
93 void ShenandoahHeapRegion::report_illegal_transition(const char *method) {
94 stringStream ss;
95 ss.print("Illegal region state transition from \"%s\", at %s\n ", region_state_to_string(_state), method);
96 print_on(&ss);
97 fatal("%s", ss.freeze());
98 }
99
// Transition this region into the regular (mutator-allocatable) state for the
// given generation affiliation. Commits memory first if needed; resets age.
void ShenandoahHeapRegion::make_regular_allocation(ShenandoahAffiliation affiliation) {
  shenandoah_assert_heaplocked();
  reset_age();
  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      // fallthrough: newly committed empty region becomes regular
    case _empty_committed:
      assert(this->affiliation() == affiliation, "Region affiliation should already be established");
      set_state(_regular);
      // fallthrough: nothing more to do once regular
    case _regular:
    case _pinned:
      return;
    default:
      report_illegal_transition("regular allocation");
  }
}
116
// Change affiliation to YOUNG_GENERATION if _state is not _pinned_cset, _regular, or _pinned. This implements
// behavior previously performed as a side effect of make_regular_bypass(). This is used by Full GC in non-generational
// modes to transition regions from FREE. Note that all non-free regions in single-generational modes are young.
void ShenandoahHeapRegion::make_affiliated_maybe() {
  shenandoah_assert_heaplocked();
  assert(!ShenandoahHeap::heap()->mode()->is_generational(), "Only call if non-generational");
  switch (_state) {
   case _empty_uncommitted:
   case _empty_committed:
   case _cset:
   case _humongous_start:
   case _humongous_cont:
     if (affiliation() != YOUNG_GENERATION) {
       set_affiliation(YOUNG_GENERATION);
     }
     return;
   case _pinned_cset:
   case _regular:
   case _pinned:
     return;
   default:
     // Debug-only check; no state or affiliation change in the default case.
     assert(false, "Unexpected _state in make_affiliated_maybe");
  }
}
141
// Force this region into the regular state, bypassing the normal transition
// rules. Only legal during STW collections (Full/Degenerated GC) or while the
// Universe is still initializing (CDS heap loading). Resets region age.
void ShenandoahHeapRegion::make_regular_bypass() {
  shenandoah_assert_heaplocked();
  assert (!Universe::is_fully_initialized() ||
          ShenandoahHeap::heap()->is_full_gc_in_progress() ||
          ShenandoahHeap::heap()->is_degenerated_gc_in_progress(),
          "Only for STW GC or when Universe is initializing (CDS)");
  reset_age();
  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      // fallthrough: all of these collapse to regular
    case _empty_committed:
    case _cset:
    case _humongous_start:
    case _humongous_cont:
      if (_state == _humongous_start || _state == _humongous_cont) {
        // CDS allocates chunks of the heap to fill with regular objects. The allocator
        // will dutifully track any waste in the unused portion of the last region. Once
        // CDS has finished initializing the objects, it will convert these regions to
        // regular regions. The 'waste' in the last region is no longer wasted at this point,
        // so we must stop treating it as such.
        decrement_humongous_waste();
      }
      set_state(_regular);
      return;
    case _pinned_cset:
      // A pinned cset region cannot become regular while pinned; park it as pinned.
      set_state(_pinned);
      return;
    case _regular:
    case _pinned:
      return;
    default:
      report_illegal_transition("regular bypass");
  }
}
176
// Transition an empty region into the first region of a humongous allocation,
// committing memory first if needed. Resets region age.
void ShenandoahHeapRegion::make_humongous_start() {
  shenandoah_assert_heaplocked();
  reset_age();
  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      // fallthrough
    case _empty_committed:
      set_state(_humongous_start);
      return;
    default:
      report_illegal_transition("humongous start allocation");
  }
}
190
// Full-GC-only variant of make_humongous_start(): compaction may re-purpose
// regions that were previously regular or humongous. Establishes the target
// generation affiliation and resets region age.
void ShenandoahHeapRegion::make_humongous_start_bypass(ShenandoahAffiliation affiliation) {
  shenandoah_assert_heaplocked();
  assert (ShenandoahHeap::heap()->is_full_gc_in_progress(), "only for full GC");
  // Don't bother to account for affiliated regions during Full GC. We recompute totals at end.
  set_affiliation(affiliation);
  reset_age();
  switch (_state) {
    case _empty_committed:
    case _regular:
    case _humongous_start:
    case _humongous_cont:
      set_state(_humongous_start);
      return;
    default:
      report_illegal_transition("humongous start bypass");
  }
}
208
// Transition an empty region into a continuation region of a humongous
// allocation, committing memory first if needed. Resets region age.
void ShenandoahHeapRegion::make_humongous_cont() {
  shenandoah_assert_heaplocked();
  reset_age();
  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      // fallthrough
    case _empty_committed:
      set_state(_humongous_cont);
      return;
    default:
      report_illegal_transition("humongous continuation allocation");
  }
}
222
// Full-GC-only variant of make_humongous_cont(): compaction may re-purpose
// regions that were previously regular or humongous. Establishes the target
// generation affiliation and resets region age.
void ShenandoahHeapRegion::make_humongous_cont_bypass(ShenandoahAffiliation affiliation) {
  shenandoah_assert_heaplocked();
  assert (ShenandoahHeap::heap()->is_full_gc_in_progress(), "only for full GC");
  set_affiliation(affiliation);
  // Don't bother to account for affiliated regions during Full GC. We recompute totals at end.
  reset_age();
  switch (_state) {
    case _empty_committed:
    case _regular:
    case _humongous_start:
    case _humongous_cont:
      set_state(_humongous_cont);
      return;
    default:
      report_illegal_transition("humongous continuation bypass");
  }
}
240
// Transition this region into the appropriate pinned state for its current
// state. Requires at least one critical pin to be registered.
void ShenandoahHeapRegion::make_pinned() {
  shenandoah_assert_heaplocked();
  assert(pin_count() > 0, "Should have pins: " SIZE_FORMAT, pin_count());

  switch (_state) {
    case _regular:
      set_state(_pinned);
      // fallthrough: already in a pinned state below
    case _pinned_cset:
    case _pinned:
      return;
    case _humongous_start:
      set_state(_pinned_humongous_start);
      // fallthrough
    case _pinned_humongous_start:
      return;
    case _cset:
      // NOTE(review): direct assignment bypasses set_state(), so no JFR
      // state-change event is emitted for this transition — confirm intentional.
      _state = _pinned_cset;
      return;
    default:
      report_illegal_transition("pinning");
  }
}
262
// Transition this region out of its pinned state, back to the state it was
// pinned from. Requires that all critical pins have been released.
void ShenandoahHeapRegion::make_unpinned() {
  shenandoah_assert_heaplocked();
  assert(pin_count() == 0, "Should not have pins: " SIZE_FORMAT, pin_count());

  switch (_state) {
    case _pinned:
      assert(is_affiliated(), "Pinned region should be affiliated");
      set_state(_regular);
      return;
    case _regular:
    case _humongous_start:
      // Not pinned: nothing to undo.
      return;
    case _pinned_cset:
      set_state(_cset);
      return;
    case _pinned_humongous_start:
      set_state(_humongous_start);
      return;
    default:
      report_illegal_transition("unpinning");
  }
}
285
// Transition a regular region into the collection set.
void ShenandoahHeapRegion::make_cset() {
  shenandoah_assert_heaplocked();
  // Leave age untouched.  We need to consult the age when we are deciding whether to promote evacuated objects.
  switch (_state) {
    case _regular:
      set_state(_cset);
      // fallthrough
    case _cset:
      return;
    default:
      report_illegal_transition("cset");
  }
}
298
// Mark this region as trash: its contents are dead and the region is ready
// to be recycled. Resets region age and, for humongous regions, undoes the
// humongous-waste accounting.
void ShenandoahHeapRegion::make_trash() {
  shenandoah_assert_heaplocked();
  reset_age();
  switch (_state) {
    case _humongous_start:
    case _humongous_cont:
    {
      // Reclaiming humongous regions and reclaim humongous waste.  When this region is eventually recycled, we'll reclaim
      // its used memory.  At recycle time, we no longer recognize this as a humongous region.
      decrement_humongous_waste();
    }
      // fallthrough
    case _cset:
      // Reclaiming cset regions
    case _regular:
      // Immediate region reclaim
      set_state(_trash);
      return;
    default:
      report_illegal_transition("trashing");
  }
}
320
// Trash a region that is known to contain no marked objects (immediate
// garbage found right after marking).
void ShenandoahHeapRegion::make_trash_immediate() {
  make_trash();

  // On this path, we know there are no marked objects in the region,
  // tell marking context about it to bypass bitmap resets.
  assert(ShenandoahHeap::heap()->gc_generation()->is_mark_complete(), "Marking should be complete here.");
  shenandoah_assert_generations_reconciled();
  ShenandoahHeap::heap()->marking_context()->reset_top_bitmap(this);
}
330
// Transition a trash region to empty-committed, recording the time it became
// empty. Also resets age (and census-noise youth tracking, when enabled).
void ShenandoahHeapRegion::make_empty() {
  shenandoah_assert_heaplocked();
  reset_age();
  CENSUS_NOISE(clear_youth();)
  switch (_state) {
    case _trash:
      set_state(_empty_committed);
      _empty_time = os::elapsedTime();
      return;
    default:
      report_illegal_transition("emptying");
  }
}
344
345 void ShenandoahHeapRegion::make_uncommitted() {
346 shenandoah_assert_heaplocked();
347 switch (_state) {
348 case _empty_committed:
349 do_uncommit();
350 set_state(_empty_uncommitted);
351 return;
352 default:
353 report_illegal_transition("uncommiting");
354 }
355 }
356
// Full-GC-only: commit an uncommitted empty region so the compactor can use it.
void ShenandoahHeapRegion::make_committed_bypass() {
  shenandoah_assert_heaplocked();
  assert (ShenandoahHeap::heap()->is_full_gc_in_progress(), "only for full GC");

  switch (_state) {
    case _empty_uncommitted:
      do_commit();
      set_state(_empty_committed);
      return;
    default:
      report_illegal_transition("commit bypass");
  }
}
370
371 void ShenandoahHeapRegion::reset_alloc_metadata() {
372 _tlab_allocs = 0;
373 _gclab_allocs = 0;
374 _plab_allocs = 0;
375 }
376
377 size_t ShenandoahHeapRegion::get_shared_allocs() const {
378 return used() - (_tlab_allocs + _gclab_allocs + _plab_allocs) * HeapWordSize;
379 }
380
381 size_t ShenandoahHeapRegion::get_tlab_allocs() const {
382 return _tlab_allocs * HeapWordSize;
383 }
384
385 size_t ShenandoahHeapRegion::get_gclab_allocs() const {
386 return _gclab_allocs * HeapWordSize;
387 }
388
389 size_t ShenandoahHeapRegion::get_plab_allocs() const {
390 return _plab_allocs * HeapWordSize;
391 }
392
393 void ShenandoahHeapRegion::set_live_data(size_t s) {
394 assert(Thread::current()->is_VM_thread(), "by VM thread");
395 _live_data = (s >> LogHeapWordSize);
396 }
397
398 void ShenandoahHeapRegion::print_on(outputStream* st) const {
399 st->print("|");
400 st->print(SIZE_FORMAT_W(5), this->_index);
401
402 switch (_state) {
403 case _empty_uncommitted:
404 st->print("|EU ");
405 break;
406 case _empty_committed:
407 st->print("|EC ");
408 break;
409 case _regular:
410 st->print("|R ");
411 break;
412 case _humongous_start:
417 break;
418 case _humongous_cont:
419 st->print("|HC ");
420 break;
421 case _cset:
422 st->print("|CS ");
423 break;
424 case _trash:
425 st->print("|TR ");
426 break;
427 case _pinned:
428 st->print("|P ");
429 break;
430 case _pinned_cset:
431 st->print("|CSP");
432 break;
433 default:
434 ShouldNotReachHere();
435 }
436
437 st->print("|%s", shenandoah_affiliation_code(affiliation()));
438
439 #define SHR_PTR_FORMAT "%12" PRIxPTR
440
441 st->print("|BTE " SHR_PTR_FORMAT ", " SHR_PTR_FORMAT ", " SHR_PTR_FORMAT,
442 p2i(bottom()), p2i(top()), p2i(end()));
443 st->print("|TAMS " SHR_PTR_FORMAT,
444 p2i(ShenandoahHeap::heap()->marking_context()->top_at_mark_start(const_cast<ShenandoahHeapRegion*>(this))));
445 st->print("|UWM " SHR_PTR_FORMAT,
446 p2i(_update_watermark));
447 st->print("|U " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(used()), proper_unit_for_byte_size(used()));
448 st->print("|T " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(get_tlab_allocs()), proper_unit_for_byte_size(get_tlab_allocs()));
449 st->print("|G " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(get_gclab_allocs()), proper_unit_for_byte_size(get_gclab_allocs()));
450 if (ShenandoahHeap::heap()->mode()->is_generational()) {
451 st->print("|P " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(get_plab_allocs()), proper_unit_for_byte_size(get_plab_allocs()));
452 }
453 st->print("|S " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(get_shared_allocs()), proper_unit_for_byte_size(get_shared_allocs()));
454 st->print("|L " SIZE_FORMAT_W(5) "%1s", byte_size_in_proper_unit(get_live_data_bytes()), proper_unit_for_byte_size(get_live_data_bytes()));
455 st->print("|CP " SIZE_FORMAT_W(3), pin_count());
456 st->cr();
457
458 #undef SHR_PTR_FORMAT
459 }
460
// oop_iterate without closure, return true if completed without cancellation.
// Coalesces runs of dead objects below TAMS into single filler objects so the
// remembered-set card scan sees a parseable heap. If 'cancellable' and GC is
// cancelled mid-way, records the resume point and returns false.
bool ShenandoahHeapRegion::oop_coalesce_and_fill(bool cancellable) {

  assert(!is_humongous(), "No need to fill or coalesce humongous regions");
  if (!is_active()) {
    end_preemptible_coalesce_and_fill();
    return true;
  }

  ShenandoahGenerationalHeap* heap = ShenandoahGenerationalHeap::heap();
  ShenandoahMarkingContext* marking_context = heap->marking_context();

  // Expect marking to be completed before these threads invoke this service.
  assert(heap->gc_generation()->is_mark_complete(), "sanity");
  shenandoah_assert_generations_reconciled();

  // All objects above TAMS are considered live even though their mark bits will not be set. Note that young-
  // gen evacuations that interrupt a long-running old-gen concurrent mark may promote objects into old-gen
  // while the old-gen concurrent marking is ongoing. These newly promoted objects will reside above TAMS
  // and will be treated as live during the current old-gen marking pass, even though they will not be
  // explicitly marked.
  HeapWord* t = marking_context->top_at_mark_start(this);

  // Resume coalesce and fill from this address
  HeapWord* obj_addr = resume_coalesce_and_fill();

  while (obj_addr < t) {
    oop obj = cast_to_oop(obj_addr);
    if (marking_context->is_marked(obj)) {
      assert(obj->klass() != nullptr, "klass should not be nullptr");
      obj_addr += obj->size();
    } else {
      // Object is not marked. Coalesce and fill dead object with dead neighbors.
      HeapWord* next_marked_obj = marking_context->get_next_marked_addr(obj_addr, t);
      assert(next_marked_obj <= t, "next marked object cannot exceed top");
      size_t fill_size = next_marked_obj - obj_addr;
      assert(fill_size >= ShenandoahHeap::min_fill_size(), "previously allocated object known to be larger than min_size");
      ShenandoahHeap::fill_with_object(obj_addr, fill_size);
      // Keep the remembered-set card metadata consistent with the new filler.
      heap->old_generation()->card_scan()->coalesce_objects(obj_addr, fill_size);
      obj_addr = next_marked_obj;
    }
    // Cancellation checkpoint: remember where to resume on the next attempt.
    if (cancellable && heap->cancelled_gc()) {
      suspend_coalesce_and_fill(obj_addr);
      return false;
    }
  }
  // Mark that this region has been coalesced and filled
  end_preemptible_coalesce_and_fill();
  return true;
}
511
512 size_t get_card_count(size_t words) {
513 assert(words % CardTable::card_size_in_words() == 0, "Humongous iteration must span whole number of cards");
514 assert(CardTable::card_size_in_words() * (words / CardTable::card_size_in_words()) == words,
515 "slice must be integral number of cards");
516 return words / CardTable::card_size_in_words();
517 }
518
519 void ShenandoahHeapRegion::oop_iterate_humongous_slice_dirty(OopIterateClosure* blk,
520 HeapWord* start, size_t words, bool write_table) const {
521 assert(is_humongous(), "only humongous region here");
522
523 ShenandoahHeapRegion* r = humongous_start_region();
524 oop obj = cast_to_oop(r->bottom());
525 size_t num_cards = get_card_count(words);
526
527 ShenandoahGenerationalHeap* heap = ShenandoahGenerationalHeap::heap();
528 ShenandoahScanRemembered* scanner = heap->old_generation()->card_scan();
529 size_t card_index = scanner->card_index_for_addr(start);
530 if (write_table) {
531 while (num_cards-- > 0) {
532 if (scanner->is_write_card_dirty(card_index++)) {
533 obj->oop_iterate(blk, MemRegion(start, start + CardTable::card_size_in_words()));
534 }
535 start += CardTable::card_size_in_words();
536 }
537 } else {
538 while (num_cards-- > 0) {
539 if (scanner->is_card_dirty(card_index++)) {
540 obj->oop_iterate(blk, MemRegion(start, start + CardTable::card_size_in_words()));
541 }
542 start += CardTable::card_size_in_words();
543 }
544 }
545 }
546
547 void ShenandoahHeapRegion::oop_iterate_humongous_slice_all(OopIterateClosure* cl, HeapWord* start, size_t words) const {
548 assert(is_humongous(), "only humongous region here");
549
550 ShenandoahHeapRegion* r = humongous_start_region();
551 oop obj = cast_to_oop(r->bottom());
552
553 // Scan all data, regardless of whether cards are dirty
554 obj->oop_iterate(cl, MemRegion(start, start + words));
555 }
556
557 ShenandoahHeapRegion* ShenandoahHeapRegion::humongous_start_region() const {
558 ShenandoahHeap* heap = ShenandoahHeap::heap();
559 assert(is_humongous(), "Must be a part of the humongous region");
560 size_t i = index();
561 ShenandoahHeapRegion* r = const_cast<ShenandoahHeapRegion*>(this);
562 while (!r->is_humongous_start()) {
563 assert(i > 0, "Sanity");
564 i--;
565 r = heap->get_region(i);
566 assert(r->is_humongous(), "Must be a part of the humongous region");
567 }
568 assert(r->is_humongous_start(), "Must be");
569 return r;
570 }
571
// Return this region to the free set: undo its accounting against the owning
// generation, reset all allocation/marking metadata, and mark it FREE.
// Caller must hold the heap lock. NOTE(review): the statement order matters —
// used() must be read for accounting before reset_alloc_metadata() clears it.
void ShenandoahHeapRegion::recycle() {
  shenandoah_assert_heaplocked();
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  ShenandoahGeneration* generation = heap->generation_for(affiliation());

  // Undo this region's contribution to its generation's usage accounting.
  heap->decrease_used(generation, used());
  generation->decrement_affiliated_region_count();

  // Reset allocation state: empty the region and clear liveness/alloc metadata.
  set_top(bottom());
  clear_live_data();
  reset_alloc_metadata();

  // No marking information is valid for an empty region.
  heap->marking_context()->reset_top_at_mark_start(this);

  set_update_watermark(bottom());

  make_empty();

  set_affiliation(FREE);
  if (ZapUnusedHeapArea) {
    // Debug aid: fill the freed range with a recognizable bit pattern.
    SpaceMangler::mangle_region(MemRegion(bottom(), end()));
  }
}
595
596 HeapWord* ShenandoahHeapRegion::block_start(const void* p) const {
597 assert(MemRegion(bottom(), end()).contains(p),
598 "p (" PTR_FORMAT ") not in space [" PTR_FORMAT ", " PTR_FORMAT ")",
599 p2i(p), p2i(bottom()), p2i(end()));
600 if (p >= top()) {
601 return top();
602 } else {
603 HeapWord* last = bottom();
604 HeapWord* cur = last;
605 while (cur <= p) {
606 last = cur;
607 cur += cast_to_oop(cur)->size();
608 }
609 shenandoah_assert_correct(nullptr, cast_to_oop(last));
610 return last;
614 size_t ShenandoahHeapRegion::block_size(const HeapWord* p) const {
615 assert(MemRegion(bottom(), end()).contains(p),
616 "p (" PTR_FORMAT ") not in space [" PTR_FORMAT ", " PTR_FORMAT ")",
617 p2i(p), p2i(bottom()), p2i(end()));
618 if (p < top()) {
619 return cast_to_oop(p)->size();
620 } else {
621 assert(p == top(), "just checking");
622 return pointer_delta(end(), (HeapWord*) p);
623 }
624 }
625
626 size_t ShenandoahHeapRegion::setup_sizes(size_t max_heap_size) {
627 // Absolute minimums we should not ever break.
628 static const size_t MIN_REGION_SIZE = 256*K;
629
630 if (FLAG_IS_DEFAULT(ShenandoahMinRegionSize)) {
631 FLAG_SET_DEFAULT(ShenandoahMinRegionSize, MIN_REGION_SIZE);
632 }
633
634 // Generational Shenandoah needs this alignment for card tables.
635 if (strcmp(ShenandoahGCMode, "generational") == 0) {
636 max_heap_size = align_up(max_heap_size , CardTable::ct_max_alignment_constraint());
637 }
638
639 size_t region_size;
640 if (FLAG_IS_DEFAULT(ShenandoahRegionSize)) {
641 if (ShenandoahMinRegionSize > max_heap_size / MIN_NUM_REGIONS) {
642 err_msg message("Max heap size (" SIZE_FORMAT "%s) is too low to afford the minimum number "
643 "of regions (" SIZE_FORMAT ") of minimum region size (" SIZE_FORMAT "%s).",
644 byte_size_in_proper_unit(max_heap_size), proper_unit_for_byte_size(max_heap_size),
645 MIN_NUM_REGIONS,
646 byte_size_in_proper_unit(ShenandoahMinRegionSize), proper_unit_for_byte_size(ShenandoahMinRegionSize));
647 vm_exit_during_initialization("Invalid -XX:ShenandoahMinRegionSize option", message);
648 }
649 if (ShenandoahMinRegionSize < MIN_REGION_SIZE) {
650 err_msg message("" SIZE_FORMAT "%s should not be lower than minimum region size (" SIZE_FORMAT "%s).",
651 byte_size_in_proper_unit(ShenandoahMinRegionSize), proper_unit_for_byte_size(ShenandoahMinRegionSize),
652 byte_size_in_proper_unit(MIN_REGION_SIZE), proper_unit_for_byte_size(MIN_REGION_SIZE));
653 vm_exit_during_initialization("Invalid -XX:ShenandoahMinRegionSize option", message);
654 }
655 if (ShenandoahMinRegionSize < MinTLABSize) {
656 err_msg message("" SIZE_FORMAT "%s should not be lower than TLAB size size (" SIZE_FORMAT "%s).",
657 byte_size_in_proper_unit(ShenandoahMinRegionSize), proper_unit_for_byte_size(ShenandoahMinRegionSize),
658 byte_size_in_proper_unit(MinTLABSize), proper_unit_for_byte_size(MinTLABSize));
797 evt.set_used(used());
798 evt.set_from(_state);
799 evt.set_to(to);
800 evt.commit();
801 }
802 _state = to;
803 }
804
805 void ShenandoahHeapRegion::record_pin() {
806 Atomic::add(&_critical_pins, (size_t)1);
807 }
808
809 void ShenandoahHeapRegion::record_unpin() {
810 assert(pin_count() > 0, "Region " SIZE_FORMAT " should have non-zero pins", index());
811 Atomic::sub(&_critical_pins, (size_t)1);
812 }
813
814 size_t ShenandoahHeapRegion::pin_count() const {
815 return Atomic::load(&_critical_pins);
816 }
817
// Change this region's generation affiliation (FREE / YOUNG / OLD), updating
// the heap's affiliation table and performing affiliation-specific bookkeeping
// (e.g. resetting region age on transition to young). No-op if the affiliation
// is unchanged.
void ShenandoahHeapRegion::set_affiliation(ShenandoahAffiliation new_affiliation) {
  ShenandoahHeap* heap = ShenandoahHeap::heap();

  ShenandoahAffiliation region_affiliation = heap->region_affiliation(this);
  {
    // NOTE(review): per the comment in the ASSERT block below,
    // complete_marking_context() may be nullptr during full GC; this debug
    // log dereferences ctx unconditionally — confirm callers never reach
    // here in that window.
    ShenandoahMarkingContext* const ctx = heap->complete_marking_context();
    log_debug(gc)("Setting affiliation of Region " SIZE_FORMAT " from %s to %s, top: " PTR_FORMAT ", TAMS: " PTR_FORMAT
                  ", watermark: " PTR_FORMAT ", top_bitmap: " PTR_FORMAT,
                  index(), shenandoah_affiliation_name(region_affiliation), shenandoah_affiliation_name(new_affiliation),
                  p2i(top()), p2i(ctx->top_at_mark_start(this)), p2i(_update_watermark), p2i(ctx->top_bitmap(this)));
  }

#ifdef ASSERT
  {
    // During full gc, heap->complete_marking_context() is not valid, may equal nullptr.
    ShenandoahMarkingContext* const ctx = heap->complete_marking_context();
    size_t idx = this->index();
    HeapWord* top_bitmap = ctx->top_bitmap(this);

    // Sanity: no stale mark bits above the highest marked address.
    assert(ctx->is_bitmap_clear_range(top_bitmap, _end),
           "Region " SIZE_FORMAT ", bitmap should be clear between top_bitmap: " PTR_FORMAT " and end: " PTR_FORMAT, idx,
           p2i(top_bitmap), p2i(_end));
  }
#endif

  // Nothing to do when the affiliation is already as requested.
  if (region_affiliation == new_affiliation) {
    return;
  }

  // Non-generational mode: no per-generation bookkeeping, just record it.
  if (!heap->mode()->is_generational()) {
    log_trace(gc)("Changing affiliation of region %zu from %s to %s",
                  index(), affiliation_name(), shenandoah_affiliation_name(new_affiliation));
    heap->set_affiliation(this, new_affiliation);
    return;
  }

  // Generational mode: validate / adjust state for the target affiliation.
  switch (new_affiliation) {
    case FREE:
      assert(!has_live(), "Free region should not have live data");
      break;
    case YOUNG_GENERATION:
      // A region re-entering young starts a fresh aging cycle.
      reset_age();
      break;
    case OLD_GENERATION:
      break;
    default:
      ShouldNotReachHere();
      return;
  }
  heap->set_affiliation(this, new_affiliation);
}
869
870 void ShenandoahHeapRegion::decrement_humongous_waste() const {
871 assert(is_humongous(), "Should only use this for humongous regions");
872 size_t waste_bytes = free();
873 if (waste_bytes > 0) {
874 ShenandoahHeap* heap = ShenandoahHeap::heap();
875 ShenandoahGeneration* generation = heap->generation_for(affiliation());
876 heap->decrease_humongous_waste(generation, waste_bytes);
877 }
878 }
|