/*
 * Copyright (c) 2017, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "classfile/classLoaderData.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "gc/shared/barrierSet.hpp"
#include "gc/shared/barrierSetNMethod.hpp"
#include "gc/shared/gc_globals.hpp"
#include "gc/shared/taskqueue.inline.hpp"
#include "gc/z/zAddress.inline.hpp"
#include "gc/z/zCollectedHeap.hpp"
#include "gc/z/zGenerationId.hpp"
#include "gc/z/zGlobals.hpp"
#include "gc/z/zGranuleMap.inline.hpp"
#include "gc/z/zHeap.inline.hpp"
#include "gc/z/zHeapIterator.hpp"
#include "gc/z/zIterator.inline.hpp"
#include "gc/z/zLock.inline.hpp"
#include "gc/z/zNMethod.hpp"
#include "memory/iterator.inline.hpp"
#include "utilities/bitMap.inline.hpp"

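// Per-granule bitmap with one bit per (minimally aligned) object start.
// Workers use par_set_bit() to atomically claim an object, so each object
// is visited exactly once even when multiple workers reach it concurrently.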
class ZHeapIteratorBitMap : public CHeapObj<mtGC> {
private:
  CHeapBitMap _bitmap;

public:
  ZHeapIteratorBitMap(size_t size_in_bits)
    : _bitmap(size_in_bits, mtGC) {}

  bool try_set_bit(size_t index) {
    return _bitmap.par_set_bit(index);
  }
};

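// Per-worker iteration state: the visitor closures together with this
// worker's object queue and array chunk queue.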
class ZHeapIteratorContext {
private:
  ObjectClosure* const                _object_cl;
  OopFieldClosure* const              _field_cl;
  const uint                          _worker_id;
  ZHeapIteratorQueue* const           _queue;
  ZHeapIteratorArrayChunkQueue* const _array_chunk_queue;

public:
  ZHeapIteratorContext(ObjectClosure* object_cl,
                       OopFieldClosure* field_cl,
                       uint worker_id,
                       ZHeapIteratorQueue* queue,
                       ZHeapIteratorArrayChunkQueue* array_chunk_queue)
    : _object_cl(object_cl),
      _field_cl(field_cl),
      _worker_id(worker_id),
      _queue(queue),
      _array_chunk_queue(array_chunk_queue) {}

  uint worker_id() const {
    return _worker_id;
  }

  void visit_field(oop base, oop* p) const {
    if (_field_cl != nullptr) {
      _field_cl->do_field(base, p);
    }
  }

  void visit_object(oop obj) const {
    _object_cl->do_object(obj);
  }

  void push(oop obj) const {
    _queue->push(obj);
  }

  void push_array_chunk(const ObjArrayTask& array_chunk) const {
    _array_chunk_queue->push(array_chunk);
  }

  bool pop(oop& obj) const {
    return _queue->pop_overflow(obj) || _queue->pop_local(obj);
  }

  bool pop_array_chunk(ObjArrayTask& array_chunk) const {
    return _array_chunk_queue->pop_overflow(array_chunk) || _array_chunk_queue->pop_local(array_chunk);
  }

  bool is_drained() const {
    return _queue->is_empty() && _array_chunk_queue->is_empty();
  }
};

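// Root closure for colored roots. Oops are loaded through the native
// access barrier; for weak roots the load additionally uses
// AS_NO_KEEPALIVE | ON_PHANTOM_OOP_REF, so the iteration itself does not
// keep otherwise unreachable objects alive.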
template <bool Weak>
class ZHeapIteratorColoredRootOopClosure : public OopClosure {
private:
  ZHeapIterator* const        _iter;
  const ZHeapIteratorContext& _context;

  oop load_oop(oop* p) {
    if (Weak) {
      return NativeAccess<AS_NO_KEEPALIVE | ON_PHANTOM_OOP_REF>::oop_load(p);
    }

    return NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
  }

public:
  ZHeapIteratorColoredRootOopClosure(ZHeapIterator* iter,
                                     const ZHeapIteratorContext& context)
    : _iter(iter),
      _context(context) {}

  virtual void do_oop(oop* p) {
    _context.visit_field(nullptr, p);
    const oop obj = load_oop(p);
    _iter->mark_visit_and_push(_context, obj);
  }

  virtual void do_oop(narrowOop* p) {
    ShouldNotReachHere();
  }
};

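// Root closure for uncolored roots, such as thread stacks and nmethods.
// These slots are expected to already contain valid, colorless zaddresses,
// so they are sanity-checked and then loaded raw.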
class ZHeapIteratorUncoloredRootOopClosure : public OopClosure {
private:
  ZHeapIterator* const        _iter;
  const ZHeapIteratorContext& _context;

  oop load_oop(oop* p) {
    const oop o = AtomicAccess::load(p);
    check_is_valid_zaddress(o);
    return RawAccess<>::oop_load(p);
  }

public:
  ZHeapIteratorUncoloredRootOopClosure(ZHeapIterator* iter,
                                       const ZHeapIteratorContext& context)
    : _iter(iter),
      _context(context) {}

  virtual void do_oop(oop* p) {
    _context.visit_field(nullptr, p);
    const oop obj = load_oop(p);
    _iter->mark_visit_and_push(_context, obj);
  }

  virtual void do_oop(narrowOop* p) {
    ShouldNotReachHere();
  }
};

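// Follows oops held by a ClassLoaderData. CLD oop slots live outside the
// Java heap (hence the assert), so they are loaded via native access.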
class ZHeapIteratorCLDOopClosure : public OopClosure {
private:
  ZHeapIterator* const        _iter;
  const ZHeapIteratorContext& _context;

  oop load_oop(oop* p) {
    assert(!ZCollectedHeap::heap()->is_in(p), "Should not be in heap");
    return NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
  }

public:
  ZHeapIteratorCLDOopClosure(ZHeapIterator* iter,
                             const ZHeapIteratorContext& context)
    : _iter(iter),
      _context(context) {}

  virtual void do_oop(oop* p) {
    const oop obj = load_oop(p);
    _iter->mark_visit_and_push(_context, obj);
  }

  virtual void do_oop(narrowOop* p) {
    ShouldNotReachHere();
  }
};

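// Field-visiting closure applied to each followed object. When
// VisitReferents is true, Reference referents are visited as well, which
// requires loading through ON_UNKNOWN_OOP_REF relative to the holder
// object; otherwise the referent field is skipped via
// DO_FIELDS_EXCEPT_REFERENT.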
template <bool VisitReferents>
class ZHeapIteratorOopClosure : public OopIterateClosure {
private:
  ZHeapIterator* const        _iter;
  const ZHeapIteratorContext& _context;
  const oop                   _base;

  oop load_oop(oop* p) {
    assert(ZCollectedHeap::heap()->is_in(p), "Should be in heap");

    if (VisitReferents) {
      return HeapAccess<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>::oop_load_at(_base, (ptrdiff_t)_base->field_offset(p));
    }

    return HeapAccess<AS_NO_KEEPALIVE>::oop_load(p);
  }

public:
  ZHeapIteratorOopClosure(ZHeapIterator* iter,
                          const ZHeapIteratorContext& context,
                          oop base)
    : OopIterateClosure(),
      _iter(iter),
      _context(context),
      _base(base) {}

  virtual ReferenceIterationMode reference_iteration_mode() {
    return VisitReferents ? DO_FIELDS : DO_FIELDS_EXCEPT_REFERENT;
  }

  virtual void do_oop(oop* p) {
    _context.visit_field(_base, p);
    const oop obj = load_oop(p);
    _iter->mark_visit_and_push(_context, obj);
  }

  virtual void do_oop(narrowOop* p) {
    ShouldNotReachHere();
  }

  virtual bool do_metadata() {
    return true;
  }

  virtual void do_klass(Klass* k) {
    ClassLoaderData* const cld = k->class_loader_data();
    ZHeapIteratorOopClosure::do_cld(cld);
  }

  virtual void do_cld(ClassLoaderData* cld) {
    ZHeapIteratorCLDOopClosure cl(_iter, _context);
    cld->oops_do(&cl, ClassLoaderData::_claim_other);
  }

  // Don't follow loom stack metadata; it's already followed in other ways through CLDs
  virtual void do_nmethod(nmethod* nm) {}
  virtual void do_method(Method* m) {}
};

ZHeapIterator::ZHeapIterator(uint nworkers,
                             bool visit_weaks,
                             bool for_verify)
  : _visit_weaks(visit_weaks),
    _for_verify(for_verify),
    _bitmaps(ZAddressOffsetMax),
    _bitmaps_lock(),
    _queues(nworkers),
    _array_chunk_queues(nworkers),
    _roots_colored(ZGenerationIdOptional::none),
    _roots_uncolored(ZGenerationIdOptional::none),
    _roots_weak_colored(ZGenerationIdOptional::none),
    _terminator(nworkers, &_queues) {

  // Create queues
  for (uint i = 0; i < _queues.size(); i++) {
    ZHeapIteratorQueue* const queue = new ZHeapIteratorQueue();
    _queues.register_queue(i, queue);
  }

  // Create array chunk queues
  for (uint i = 0; i < _array_chunk_queues.size(); i++) {
    ZHeapIteratorArrayChunkQueue* const array_chunk_queue = new ZHeapIteratorArrayChunkQueue();
    _array_chunk_queues.register_queue(i, array_chunk_queue);
  }
}

ZHeapIterator::~ZHeapIterator() {
  // Destroy bitmaps
  ZHeapIteratorBitMapsIterator iter(&_bitmaps);
  for (ZHeapIteratorBitMap* bitmap; iter.next(&bitmap);) {
    delete bitmap;
  }

  // Destroy array chunk queues
  for (uint i = 0; i < _array_chunk_queues.size(); i++) {
    delete _array_chunk_queues.queue(i);
  }

  // Destroy queues
  for (uint i = 0; i < _queues.size(); i++) {
    delete _queues.queue(i);
  }

  // Clear claimed CLD bits
  ClassLoaderDataGraph::clear_claimed_marks(ClassLoaderData::_claim_other);
}

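// Each granule gets a visited bitmap with one bit per possible small-object
// start, i.e. ZGranuleSize >> ZObjectAlignmentSmallShift bits. An object's
// index is its alignment slot within the granule that contains it.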
static size_t object_index_max() {
  return ZGranuleSize >> ZObjectAlignmentSmallShift;
}

static size_t object_index(oop obj) {
  const zaddress addr = to_zaddress(obj);
  const zoffset offset = ZAddress::offset(addr);
  const uintptr_t mask = ZGranuleSize - 1;
  return (untype(offset) & mask) >> ZObjectAlignmentSmallShift;
}

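// Bitmaps are installed lazily using double-checked locking: first a racy
// acquire-load, and if that fails, a re-check under the lock before
// publishing a new bitmap with a releasing store.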
ZHeapIteratorBitMap* ZHeapIterator::object_bitmap(oop obj) {
  const zoffset offset = ZAddress::offset(to_zaddress(obj));
  ZHeapIteratorBitMap* bitmap = _bitmaps.get_acquire(offset);
  if (bitmap == nullptr) {
    ZLocker<ZLock> locker(&_bitmaps_lock);
    bitmap = _bitmaps.get(offset);
    if (bitmap == nullptr) {
      // Install new bitmap
      bitmap = new ZHeapIteratorBitMap(object_index_max());
      _bitmaps.release_put(offset, bitmap);
    }
  }

  return bitmap;
}

bool ZHeapIterator::should_visit_object_at_mark() const {
  // Verification wants to visit objects as soon as they are found.
  return _for_verify;
}

bool ZHeapIterator::should_visit_object_at_follow() const {
  // Non-verify code must defer visiting objects to the follow stage,
  // after the root iteration has completed. This prevents lock-ordering
  // problems between the root iterator and the visit closures.
  return !_for_verify;
}

bool ZHeapIterator::mark_object(oop obj) {
  if (obj == nullptr) {
    return false;
  }

  ZHeapIteratorBitMap* const bitmap = object_bitmap(obj);
  const size_t index = object_index(obj);
  return bitmap->try_set_bit(index);
}

typedef ClaimingCLDToOopClosure<ClassLoaderData::_claim_other> ZHeapIteratorCLDClosure;

class ZHeapIteratorNMethodClosure : public NMethodClosure {
private:
  OopClosure* const        _cl;
  BarrierSetNMethod* const _bs_nm;

public:
  ZHeapIteratorNMethodClosure(OopClosure* cl)
    : _cl(cl),
      _bs_nm(BarrierSet::barrier_set()->barrier_set_nmethod()) {}

  virtual void do_nmethod(nmethod* nm) {
    // If ClassUnloading is turned off, all nmethods are considered strong,
    // not only those on the call stacks. The heap iteration might happen
    // before the concurrent processing of the code cache; make sure that
    // all nmethods have been processed before visiting the oops.
    _bs_nm->nmethod_entry_barrier(nm);

    ZNMethod::nmethod_oops_do(nm, _cl);
  }
};

class ZHeapIteratorThreadClosure : public ThreadClosure {
private:
  OopClosure* const     _cl;
  NMethodClosure* const _nm_cl;

public:
  ZHeapIteratorThreadClosure(OopClosure* cl, NMethodClosure* nm_cl)
    : _cl(cl),
      _nm_cl(nm_cl) {}

  void do_thread(Thread* thread) {
    thread->oops_do(_cl, _nm_cl);
  }
};

void ZHeapIterator::push_strong_roots(const ZHeapIteratorContext& context) {
  {
    ZHeapIteratorColoredRootOopClosure<false /* Weak */> cl(this, context);
    ZHeapIteratorCLDClosure cld_cl(&cl);

    _roots_colored.apply(&cl,
                         &cld_cl);
  }

  {
    ZHeapIteratorUncoloredRootOopClosure cl(this, context);
    ZHeapIteratorNMethodClosure nm_cl(&cl);
    ZHeapIteratorThreadClosure thread_cl(&cl, &nm_cl);

    _roots_uncolored.apply(&thread_cl,
                           &nm_cl);
  }
}

void ZHeapIterator::push_weak_roots(const ZHeapIteratorContext& context) {
  ZHeapIteratorColoredRootOopClosure<true /* Weak */> cl(this, context);
  _roots_weak_colored.apply(&cl);
}

template <bool VisitWeaks>
void ZHeapIterator::push_roots(const ZHeapIteratorContext& context) {
  push_strong_roots(context);
  if (VisitWeaks) {
    push_weak_roots(context);
  }
}

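// Claim the object in the visited bitmap. On first claim, visit it
// immediately if we're verifying, then queue it for following.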
void ZHeapIterator::mark_visit_and_push(const ZHeapIteratorContext& context, oop obj) {
  if (mark_object(obj)) {
    if (should_visit_object_at_mark()) {
      context.visit_object(obj);
    }
    context.push(obj);
  }
}

template <bool VisitReferents>
void ZHeapIterator::follow_object(const ZHeapIteratorContext& context, oop obj) {
  ZHeapIteratorOopClosure<VisitReferents> cl(this, context, obj);
  ZIterator::oop_iterate(obj, &cl);
}

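// Object arrays are not followed in one go. The array is instead split
// into chunks of ObjArrayMarkingStride elements, pushed onto a separate
// queue, so that large arrays can be processed incrementally and the
// chunks stolen by other workers.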
void ZHeapIterator::follow_array(const ZHeapIteratorContext& context, oop obj) {
  // Follow klass
  ZHeapIteratorOopClosure<false /* VisitReferents */> cl(this, context, obj);
  cl.do_klass(obj->klass());

  // Push array chunk
  context.push_array_chunk(ObjArrayTask(obj, 0 /* index */));
}

void ZHeapIterator::follow_array_chunk(const ZHeapIteratorContext& context, const ObjArrayTask& array) {
  const objArrayOop obj = objArrayOop(array.obj());
  const int length = obj->length();
  const int start = array.index();
  const int stride = MIN2<int>(length - start, (int)ObjArrayMarkingStride);
  const int end = start + stride;

  // Push remaining array chunk first
  if (end < length) {
    context.push_array_chunk(ObjArrayTask(obj, end));
  }

  // Follow array chunk
  ZHeapIteratorOopClosure<false /* VisitReferents */> cl(this, context, obj);
  ZIterator::oop_iterate_range(obj, &cl, start, end);
}

template <bool VisitWeaks>
void ZHeapIterator::follow(const ZHeapIteratorContext& context, oop obj) {
  // Follow
  if (obj->is_objArray()) {
    follow_array(context, obj);
  } else {
    follow_object<VisitWeaks>(context, obj);
  }
}

template <bool VisitWeaks>
void ZHeapIterator::visit_and_follow(const ZHeapIteratorContext& context, oop obj) {
  if (should_visit_object_at_follow()) {
    context.visit_object(obj);
  }

  follow<VisitWeaks>(context, obj);
}

template <bool VisitWeaks>
void ZHeapIterator::drain(const ZHeapIteratorContext& context) {
  ObjArrayTask array;
  oop obj;

  do {
    while (context.pop(obj)) {
      visit_and_follow<VisitWeaks>(context, obj);
    }

    if (context.pop_array_chunk(array)) {
      follow_array_chunk(context, array);
    }
  } while (!context.is_drained());
}

template <bool VisitWeaks>
void ZHeapIterator::steal(const ZHeapIteratorContext& context) {
  ObjArrayTask array;
  oop obj;

  if (steal_array_chunk(context, array)) {
    follow_array_chunk(context, array);
  } else if (steal(context, obj)) {
    visit_and_follow<VisitWeaks>(context, obj);
  }
}

bool ZHeapIterator::steal(const ZHeapIteratorContext& context, oop& obj) {
  return _queues.steal(context.worker_id(), obj);
}

bool ZHeapIterator::steal_array_chunk(const ZHeapIteratorContext& context, ObjArrayTask& array) {
  return _array_chunk_queues.steal(context.worker_id(), array);
}

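// Alternate between draining the local queues and stealing work from other
// workers until all queues are empty and every worker has agreed to
// terminate via the task terminator.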
template <bool VisitWeaks>
void ZHeapIterator::drain_and_steal(const ZHeapIteratorContext& context) {
  do {
    drain<VisitWeaks>(context);
    steal<VisitWeaks>(context);
  } while (!context.is_drained() || !_terminator.offer_termination());
}

template <bool VisitWeaks>
void ZHeapIterator::object_iterate_inner(const ZHeapIteratorContext& context) {
  push_roots<VisitWeaks>(context);
  drain_and_steal<VisitWeaks>(context);
}

void ZHeapIterator::object_iterate(ObjectClosure* object_cl, uint worker_id) {
  object_and_field_iterate(object_cl, nullptr /* field_cl */, worker_id);
}

void ZHeapIterator::object_and_field_iterate(ObjectClosure* object_cl, OopFieldClosure* field_cl, uint worker_id) {
  const ZHeapIteratorContext context(object_cl,
                                     field_cl,
                                     worker_id,
                                     _queues.queue(worker_id),
                                     _array_chunk_queues.queue(worker_id));

  if (_visit_weaks) {
    object_iterate_inner<true /* VisitWeaks */>(context);
  } else {
    object_iterate_inner<false /* VisitWeaks */>(context);
  }
}