1 /*
2 * Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
3 * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.
4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5 *
6 * This code is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 only, as
8 * published by the Free Software Foundation.
9 *
10 * This code is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
13 * version 2 for more details (a copy is included in the LICENSE file that
14 * accompanied this code).
15 *
16 * You should have received a copy of the GNU General Public License version
17 * 2 along with this work; if not, write to the Free Software Foundation,
18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
19 *
20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
21 * or visit www.oracle.com if you need additional information or have any
22 * questions.
23 *
24 */
25
26 #include "precompiled.hpp"
27 #include "classfile/javaClasses.hpp"
28 #include "gc/shared/workerThread.hpp"
29 #include "gc/shenandoah/shenandoahClosures.inline.hpp"
30 #include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
31 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
32 #include "gc/shenandoah/shenandoahUtils.hpp"
33 #include "runtime/atomic.hpp"
34 #include "logging/log.hpp"
35
36 static ReferenceType reference_type(oop reference) {
37 return InstanceKlass::cast(reference->klass())->reference_type();
38 }
39
40 static const char* reference_type_name(ReferenceType type) {
41 switch (type) {
42 case REF_SOFT:
43 return "Soft";
44
45 case REF_WEAK:
46 return "Weak";
47
48 case REF_FINAL:
49 return "Final";
50
51 case REF_PHANTOM:
52 return "Phantom";
53
54 default:
55 ShouldNotReachHere();
56 return nullptr;
57 }
58 }
59
// Stores 'value' into the given oop slot; specialized below for
// uncompressed and compressed (narrowOop) layouts.
template <typename T>
static void set_oop_field(T* field, oop value);

template <>
void set_oop_field<oop>(oop* field, oop value) {
  // Uncompressed heap: plain raw store of the oop.
  *field = value;
}
67
template <>
void set_oop_field<narrowOop>(narrowOop* field, oop value) {
  // Compressed heap: encode the oop before the raw store.
  *field = CompressedOops::encode(value);
}
72
73 static oop lrb(oop obj) {
74 if (obj != nullptr && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
75 return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
76 } else {
77 return obj;
78 }
79 }
80
template <typename T>
static volatile T* reference_referent_addr(oop reference) {
  // Raw (barrier-free) address of Reference.referent; volatile because the
  // application may clear the referent concurrently with GC.
  return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
}
85
86 inline oop reference_coop_decode_raw(narrowOop v) {
87 return CompressedOops::is_null(v) ? nullptr : CompressedOops::decode_raw(v);
88 }
89
90 inline oop reference_coop_decode_raw(oop v) {
251 }
252
253 bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
254 if (type != REF_SOFT) {
255 // Not a SoftReference
256 return false;
257 }
258
259 // Ask SoftReference policy
260 const jlong clock = java_lang_ref_SoftReference::clock();
261 assert(clock != 0, "Clock not initialized");
262 assert(_soft_reference_policy != nullptr, "Policy not initialized");
263 return !_soft_reference_policy->should_clear_reference(reference, clock);
264 }
265
266 template <typename T>
267 bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
268 T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
269 T heap_oop = RawAccess<>::oop_load(referent_addr);
270 oop referent = CompressedOops::decode(heap_oop);
271
272 if (is_inactive<T>(reference, referent, type)) {
273 log_trace(gc,ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
274 return false;
275 }
276
277 if (is_strongly_live(referent)) {
278 log_trace(gc,ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
279 return false;
280 }
281
282 if (is_softly_live(reference, type)) {
283 log_trace(gc,ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
284 return false;
285 }
286
287 return true;
288 }
289
290 template <typename T>
291 bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
292 HeapWord* raw_referent = reference_referent_raw<T>(reference);
293 if (raw_referent == nullptr) {
294 // Reference has been cleared, by a call to Reference.enqueue()
295 // or Reference.clear() from the application, which means we
296 // should drop the reference.
297 return true;
298 }
299
300 // Check if the referent is still alive, in which case we should
301 // drop the reference.
302 if (type == REF_PHANTOM) {
303 return ShenandoahHeap::heap()->complete_marking_context()->is_marked(raw_referent);
304 } else {
305 return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(raw_referent);
306 }
332 if (reference_discovered<T>(reference) != nullptr) {
333 // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
334 // in which case it will be seen 2x by marking.
335 log_trace(gc,ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
336 return true;
337 }
338
339 if (type == REF_FINAL) {
340 ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
341 bool weak = cl->is_weak();
342 cl->set_weak(true);
343 if (UseCompressedOops) {
344 cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
345 } else {
346 cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
347 }
348 cl->set_weak(weak);
349 }
350
351 // Add reference to discovered list
352 ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
353 oop discovered_head = refproc_data.discovered_list_head<T>();
354 if (discovered_head == nullptr) {
355 // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
356 // discovered field: if it is null, then it is not-yet discovered, otherwise it is discovered
357 discovered_head = reference;
358 }
359 if (reference_cas_discovered<T>(reference, discovered_head)) {
360 refproc_data.set_discovered_list_head<T>(reference);
361 assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
362 log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
363 _ref_proc_thread_locals[worker_id].inc_discovered(type);
364 }
365 return true;
366 }
367
368 bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
369 if (!RegisterReferences) {
370 // Reference processing disabled
371 return false;
372 }
373
374 log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
375 uint worker_id = WorkerThread::worker_id();
376 _ref_proc_thread_locals[worker_id].inc_encountered(type);
377
378 if (UseCompressedOops) {
379 return discover<narrowOop>(reference, type, worker_id);
380 } else {
381 return discover<oop>(reference, type, worker_id);
382 }
383 }
384
// Unlinks a reference whose referent is either already cleared or still
// alive from the discovered list; the Reference stays fully functional for
// the application. Returns the next element of the discovered list.
template <typename T>
oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

#ifdef ASSERT
  // Sanity: we only drop references whose referent is null or marked alive.
  HeapWord* raw_referent = reference_referent_raw<T>(reference);
  assert(raw_referent == nullptr || ShenandoahHeap::heap()->marking_context()->is_marked(raw_referent),
         "only drop references with alive referents");
#endif

  // Unlink and return next in list
  oop next = reference_discovered<T>(reference);
  reference_set_discovered<T>(reference, nullptr);
  return next;
}
400
401 template <typename T>
402 T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
403 log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
404
405 // Update statistics
406 _ref_proc_thread_locals[worker_id].inc_enqueued(type);
407
408 // Make reference inactive
409 make_inactive<T>(reference, type);
410
411 // Return next in list
412 return reference_discovered_addr<T>(reference);
413 }
414
415 template <typename T>
416 void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {
417 log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));
429 break;
430 }
431 log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
432 const ReferenceType type = reference_type(reference);
433
434 if (should_drop<T>(reference, type)) {
435 set_oop_field(p, drop<T>(reference, type));
436 } else {
437 p = keep<T>(reference, type, worker_id);
438 }
439
440 const oop discovered = lrb(reference_discovered<T>(reference));
441 if (reference == discovered) {
442 // Reset terminating self-loop to null
443 reference_set_discovered<T>(reference, oop(nullptr));
444 break;
445 }
446 }
447
448 // Prepend discovered references to internal pending list
449 if (!CompressedOops::is_null(*list)) {
450 oop head = lrb(CompressedOops::decode_not_null(*list));
451 shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
452 oop prev = Atomic::xchg(&_pending_list, head);
453 RawAccess<>::oop_store(p, prev);
454 if (prev == nullptr) {
455 // First to prepend to list, record tail
456 _pending_list_tail = reinterpret_cast<void*>(p);
457 }
458
459 // Clear discovered list
460 set_oop_field(list, oop(nullptr));
461 }
462 }
463
464 void ShenandoahReferenceProcessor::work() {
465 // Process discovered references
466 uint max_workers = ShenandoahHeap::heap()->max_workers();
467 uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
468 while (worker_id < max_workers) {
469 if (UseCompressedOops) {
470 process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
471 } else {
472 process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
473 }
// Runs the reference-processing phase: workers drain the per-worker
// discovered lists, then the SoftReference clock is updated, statistics are
// collected, and surviving references are pushed onto the global pending list.
void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkerThreads* workers, bool concurrent) {

  // Reset the work-distribution counter; the release fence publishes the
  // reset before the worker task below starts claiming list indices.
  Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);

  // Process discovered lists
  ShenandoahReferenceProcessorTask task(phase, concurrent, this);
  workers->run_task(&task);

  // Update SoftReference clock
  soft_reference_update_clock();

  // Collect, log and trace statistics
  collect_statistics();

  enqueue_references(concurrent);
}
521
// Prepends the internal pending list to the JDK-global pending list
// (Universe::reference_pending_list). Caller must hold Heap_lock or run
// inside a safepoint VM operation.
void ShenandoahReferenceProcessor::enqueue_references_locked() {
  // Prepend internal pending list to external pending list
  shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
  if (UseCompressedOops) {
    // _pending_list_tail points at the discovered slot of our last element;
    // link the previous global head after it.
    *reinterpret_cast<narrowOop*>(_pending_list_tail) = CompressedOops::encode(Universe::swap_reference_pending_list(_pending_list));
  } else {
    *reinterpret_cast<oop*>(_pending_list_tail) = Universe::swap_reference_pending_list(_pending_list);
  }
}
531
532 void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
533 if (_pending_list == nullptr) {
534 // Nothing to enqueue
535 return;
536 }
537
538 if (!concurrent) {
539 // When called from mark-compact or degen-GC, the locking is done by the VMOperation,
540 enqueue_references_locked();
541 } else {
542 // Heap_lock protects external pending list
543 MonitorLocker ml(Heap_lock);
544
545 enqueue_references_locked();
546
547 // Notify ReferenceHandler thread
548 ml.notify_all();
549 }
550
551 // Reset internal pending list
552 _pending_list = nullptr;
553 _pending_list_tail = &_pending_list;
554 }
555
556 template<typename T>
557 void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {
|
1 /*
2 * Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
3 * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.
4 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
5 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
6 *
7 * This code is free software; you can redistribute it and/or modify it
8 * under the terms of the GNU General Public License version 2 only, as
9 * published by the Free Software Foundation.
10 *
11 * This code is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 * version 2 for more details (a copy is included in the LICENSE file that
15 * accompanied this code).
16 *
17 * You should have received a copy of the GNU General Public License version
18 * 2 along with this work; if not, write to the Free Software Foundation,
19 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
20 *
21 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
22 * or visit www.oracle.com if you need additional information or have any
23 * questions.
24 *
25 */
26
27 #include "precompiled.hpp"
28 #include "classfile/javaClasses.hpp"
29 #include "gc/shared/workerThread.hpp"
30 #include "gc/shenandoah/shenandoahGeneration.hpp"
31 #include "gc/shenandoah/shenandoahClosures.inline.hpp"
32 #include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
33 #include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
34 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
35 #include "gc/shenandoah/shenandoahUtils.hpp"
36 #include "runtime/atomic.hpp"
37 #include "logging/log.hpp"
38
39 static ReferenceType reference_type(oop reference) {
40 return InstanceKlass::cast(reference->klass())->reference_type();
41 }
42
43 static const char* reference_type_name(ReferenceType type) {
44 switch (type) {
45 case REF_SOFT:
46 return "Soft";
47
48 case REF_WEAK:
49 return "Weak";
50
51 case REF_FINAL:
52 return "Final";
53
54 case REF_PHANTOM:
55 return "Phantom";
56
57 default:
58 ShouldNotReachHere();
59 return nullptr;
60 }
61 }
62
// Dirties the remembered-set card covering 'field' when the store created an
// old-to-young pointer. Only called when ShenandoahCardBarrier is enabled
// (generational mode).
template <typename T>
static void card_mark_barrier(T* field, oop value) {
  assert(ShenandoahCardBarrier, "Card-mark barrier should be on");
  ShenandoahGenerationalHeap* heap = ShenandoahGenerationalHeap::heap();
  assert(heap->is_in_or_null(value), "Should be in heap");
  if (heap->is_in_old(field) && heap->is_in_young(value)) {
    // For Shenandoah, each generation collects all the _referents_ that belong to the
    // collected generation. We can end up with discovered lists that contain a mixture
    // of old and young _references_. These references are linked together through the
    // discovered field in java.lang.Reference. In some cases, creating or editing this
    // list may result in the creation of _new_ old-to-young pointers which must dirty
    // the corresponding card. Failing to do this may cause heap verification errors and
    // lead to incorrect GC behavior.
    heap->old_generation()->mark_card_as_dirty(field);
  }
}
79
// Stores 'value' into the given oop slot; specialized below for
// uncompressed and compressed (narrowOop) layouts. In generational mode the
// specializations also maintain the remembered set via card_mark_barrier.
template <typename T>
static void set_oop_field(T* field, oop value);

template <>
void set_oop_field<oop>(oop* field, oop value) {
  // Uncompressed heap: raw store, then dirty the card if needed.
  *field = value;
  if (ShenandoahCardBarrier) {
    card_mark_barrier(field, value);
  }
}
90
template <>
void set_oop_field<narrowOop>(narrowOop* field, oop value) {
  // Compressed heap: encode before the raw store, then dirty the card if needed.
  *field = CompressedOops::encode(value);
  if (ShenandoahCardBarrier) {
    card_mark_barrier(field, value);
  }
}
98
99 static oop lrb(oop obj) {
100 if (obj != nullptr && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
101 return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
102 } else {
103 return obj;
104 }
105 }
106
template <typename T>
static volatile T* reference_referent_addr(oop reference) {
  // Raw (barrier-free) address of Reference.referent; volatile because the
  // application may clear the referent concurrently with GC.
  return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
}
111
112 inline oop reference_coop_decode_raw(narrowOop v) {
113 return CompressedOops::is_null(v) ? nullptr : CompressedOops::decode_raw(v);
114 }
115
116 inline oop reference_coop_decode_raw(oop v) {
277 }
278
279 bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
280 if (type != REF_SOFT) {
281 // Not a SoftReference
282 return false;
283 }
284
285 // Ask SoftReference policy
286 const jlong clock = java_lang_ref_SoftReference::clock();
287 assert(clock != 0, "Clock not initialized");
288 assert(_soft_reference_policy != nullptr, "Policy not initialized");
289 return !_soft_reference_policy->should_clear_reference(reference, clock);
290 }
291
292 template <typename T>
293 bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
294 T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
295 T heap_oop = RawAccess<>::oop_load(referent_addr);
296 oop referent = CompressedOops::decode(heap_oop);
297 ShenandoahHeap* heap = ShenandoahHeap::heap();
298
299 if (is_inactive<T>(reference, referent, type)) {
300 log_trace(gc,ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
301 return false;
302 }
303
304 if (is_strongly_live(referent)) {
305 log_trace(gc,ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
306 return false;
307 }
308
309 if (is_softly_live(reference, type)) {
310 log_trace(gc,ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
311 return false;
312 }
313
314 if (!heap->is_in_active_generation(referent)) {
315 log_trace(gc,ref)("Referent outside of active generation: " PTR_FORMAT, p2i(referent));
316 return false;
317 }
318
319 return true;
320 }
321
322 template <typename T>
323 bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
324 HeapWord* raw_referent = reference_referent_raw<T>(reference);
325 if (raw_referent == nullptr) {
326 // Reference has been cleared, by a call to Reference.enqueue()
327 // or Reference.clear() from the application, which means we
328 // should drop the reference.
329 return true;
330 }
331
332 // Check if the referent is still alive, in which case we should
333 // drop the reference.
334 if (type == REF_PHANTOM) {
335 return ShenandoahHeap::heap()->complete_marking_context()->is_marked(raw_referent);
336 } else {
337 return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(raw_referent);
338 }
364 if (reference_discovered<T>(reference) != nullptr) {
365 // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
366 // in which case it will be seen 2x by marking.
367 log_trace(gc,ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
368 return true;
369 }
370
371 if (type == REF_FINAL) {
372 ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
373 bool weak = cl->is_weak();
374 cl->set_weak(true);
375 if (UseCompressedOops) {
376 cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
377 } else {
378 cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
379 }
380 cl->set_weak(weak);
381 }
382
383 // Add reference to discovered list
384 // Each worker thread has a private copy of refproc_data, which includes a private discovered list. This means
385 // there's no risk that a different worker thread will try to manipulate my discovered list head while I'm making
386 // reference the head of my discovered list.
387 ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
388 oop discovered_head = refproc_data.discovered_list_head<T>();
389 if (discovered_head == nullptr) {
390 // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
391 // discovered field: if it is null, then it is not-yet discovered, otherwise it is discovered
392 discovered_head = reference;
393 }
394 if (reference_cas_discovered<T>(reference, discovered_head)) {
395 // We successfully set this reference object's next pointer to discovered_head. This marks reference as discovered.
396 // If reference_cas_discovered fails, that means some other worker thread took credit for discovery of this reference,
397 // and that other thread will place reference on its discovered list, so I can ignore reference.
398
399 // In case we have created an interesting pointer, mark the remembered set card as dirty.
400 if (ShenandoahCardBarrier) {
401 T* addr = reinterpret_cast<T*>(java_lang_ref_Reference::discovered_addr_raw(reference));
402 card_mark_barrier(addr, discovered_head);
403 }
404
405 // Make the discovered_list_head point to reference.
406 refproc_data.set_discovered_list_head<T>(reference);
407 assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
408 log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
409 _ref_proc_thread_locals[worker_id].inc_discovered(type);
410 }
411 return true;
412 }
413
414 bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
415 if (!RegisterReferences) {
416 // Reference processing disabled
417 return false;
418 }
419
420 log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s, %s)",
421 p2i(reference), reference_type_name(type), ShenandoahHeap::heap()->heap_region_containing(reference)->affiliation_name());
422 uint worker_id = WorkerThread::worker_id();
423 _ref_proc_thread_locals[worker_id].inc_encountered(type);
424
425 if (UseCompressedOops) {
426 return discover<narrowOop>(reference, type, worker_id);
427 } else {
428 return discover<oop>(reference, type, worker_id);
429 }
430 }
431
// Unlinks a reference whose referent is either already cleared or still
// alive from the discovered list; the Reference stays fully functional for
// the application. Returns the next element of the discovered list.
template <typename T>
oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

  HeapWord* raw_referent = reference_referent_raw<T>(reference);

#ifdef ASSERT
  // Sanity: we only drop references whose referent is null or marked alive.
  assert(raw_referent == nullptr || ShenandoahHeap::heap()->marking_context()->is_marked(raw_referent),
         "only drop references with alive referents");
#endif

  // Unlink and return next in list
  oop next = reference_discovered<T>(reference);
  reference_set_discovered<T>(reference, nullptr);
  // When this reference was discovered, it would not have been marked. If it ends up surviving
  // the cycle, we need to dirty the card if the reference is old and the referent is young. Note
  // that if the reference is not dropped, then its pointer to the referent will be nulled before
  // evacuation begins so card does not need to be dirtied.
  if (ShenandoahCardBarrier) {
    card_mark_barrier(cast_from_oop<HeapWord*>(reference), cast_to_oop(raw_referent));
  }
  return next;
}
455
456 template <typename T>
457 T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
458 log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
459
460 // Update statistics
461 _ref_proc_thread_locals[worker_id].inc_enqueued(type);
462
463 // Make reference inactive
464 make_inactive<T>(reference, type);
465
466 // Return next in list
467 return reference_discovered_addr<T>(reference);
468 }
469
470 template <typename T>
471 void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {
472 log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));
484 break;
485 }
486 log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
487 const ReferenceType type = reference_type(reference);
488
489 if (should_drop<T>(reference, type)) {
490 set_oop_field(p, drop<T>(reference, type));
491 } else {
492 p = keep<T>(reference, type, worker_id);
493 }
494
495 const oop discovered = lrb(reference_discovered<T>(reference));
496 if (reference == discovered) {
497 // Reset terminating self-loop to null
498 reference_set_discovered<T>(reference, oop(nullptr));
499 break;
500 }
501 }
502
503 // Prepend discovered references to internal pending list
504 // set_oop_field maintains the card mark barrier as this list is constructed.
505 if (!CompressedOops::is_null(*list)) {
506 oop head = lrb(CompressedOops::decode_not_null(*list));
507 shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
508 oop prev = Atomic::xchg(&_pending_list, head);
509 set_oop_field(p, prev);
510 if (prev == nullptr) {
511 // First to prepend to list, record tail
512 _pending_list_tail = reinterpret_cast<void*>(p);
513 }
514
515 // Clear discovered list
516 set_oop_field(list, oop(nullptr));
517 }
518 }
519
520 void ShenandoahReferenceProcessor::work() {
521 // Process discovered references
522 uint max_workers = ShenandoahHeap::heap()->max_workers();
523 uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
524 while (worker_id < max_workers) {
525 if (UseCompressedOops) {
526 process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
527 } else {
528 process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
529 }
// Runs the reference-processing phase: workers drain the per-worker
// discovered lists, then the SoftReference clock is updated, statistics are
// collected, and surviving references are pushed onto the global pending list.
void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkerThreads* workers, bool concurrent) {

  // Reset the work-distribution counter; the release fence publishes the
  // reset before the worker task below starts claiming list indices.
  Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);

  // Process discovered lists
  ShenandoahReferenceProcessorTask task(phase, concurrent, this);
  workers->run_task(&task);

  // Update SoftReference clock
  soft_reference_update_clock();

  // Collect, log and trace statistics
  collect_statistics();

  enqueue_references(concurrent);
}
577
// Prepends the internal pending list to the JDK-global pending list
// (Universe::reference_pending_list). Caller must hold Heap_lock or run
// inside a safepoint VM operation.
void ShenandoahReferenceProcessor::enqueue_references_locked() {
  // Prepend internal pending list to external pending list
  shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);

  // During reference processing, we maintain a local list of references that are identified by
  // _pending_list and _pending_list_tail. _pending_list_tail points to the next field of the last Reference object on
  // the local list.
  //
  // There is also a global list of references identified by Universe::_reference_pending_list

  // The following code has the effect of:
  //  1. Making the global Universe::_reference_pending_list point to my local list
  //  2. Overwriting the next field of the last Reference on my local list to point at the previous head of the
  //     global Universe::_reference_pending_list

  oop former_head_of_global_list = Universe::swap_reference_pending_list(_pending_list);
  if (UseCompressedOops) {
    // set_oop_field also applies the card-mark barrier when ShenandoahCardBarrier is on.
    set_oop_field<narrowOop>(reinterpret_cast<narrowOop*>(_pending_list_tail), former_head_of_global_list);
  } else {
    set_oop_field<oop>(reinterpret_cast<oop*>(_pending_list_tail), former_head_of_global_list);
  }
}
600
601 void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
602 if (_pending_list == nullptr) {
603 // Nothing to enqueue
604 return;
605 }
606 if (!concurrent) {
607 // When called from mark-compact or degen-GC, the locking is done by the VMOperation,
608 enqueue_references_locked();
609 } else {
610 // Heap_lock protects external pending list
611 MonitorLocker ml(Heap_lock);
612
613 enqueue_references_locked();
614
615 // Notify ReferenceHandler thread
616 ml.notify_all();
617 }
618
619 // Reset internal pending list
620 _pending_list = nullptr;
621 _pending_list_tail = &_pending_list;
622 }
623
624 template<typename T>
625 void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {
|