1 /*
2 * Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
3 * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.
4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5 *
6 * This code is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 only, as
8 * published by the Free Software Foundation.
9 *
10 * This code is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
13 * version 2 for more details (a copy is included in the LICENSE file that
14 * accompanied this code).
15 *
16 * You should have received a copy of the GNU General Public License version
17 * 2 along with this work; if not, write to the Free Software Foundation,
18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
19 *
20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
21 * or visit www.oracle.com if you need additional information or have any
22 * questions.
23 *
24 */
25
26 #include "precompiled.hpp"
27 #include "classfile/javaClasses.hpp"
28 #include "gc/shared/workerThread.hpp"
29 #include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
30 #include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
31 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
32 #include "gc/shenandoah/shenandoahUtils.hpp"
33 #include "runtime/atomic.hpp"
34 #include "logging/log.hpp"
35
36 static ReferenceType reference_type(oop reference) {
37 return InstanceKlass::cast(reference->klass())->reference_type();
38 }
39
40 static const char* reference_type_name(ReferenceType type) {
41 switch (type) {
42 case REF_SOFT:
43 return "Soft";
44
45 case REF_WEAK:
46 return "Weak";
47
48 case REF_FINAL:
49 return "Final";
50
51 case REF_PHANTOM:
52 return "Phantom";
53
54 default:
55 ShouldNotReachHere();
56 return nullptr;
57 }
58 }
59
60 template <typename T>
61 static void set_oop_field(T* field, oop value);
62
63 template <>
64 void set_oop_field<oop>(oop* field, oop value) {
65 *field = value;
66 }
67
68 template <>
69 void set_oop_field<narrowOop>(narrowOop* field, oop value) {
70 *field = CompressedOops::encode(value);
71 }
72
73 static oop lrb(oop obj) {
74 if (obj != nullptr && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
75 return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
76 } else {
77 return obj;
78 }
79 }
80
81 template <typename T>
82 static volatile T* reference_referent_addr(oop reference) {
83 return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
84 }
85
86 template <typename T>
87 static oop reference_referent(oop reference) {
88 T heap_oop = Atomic::load(reference_referent_addr<T>(reference));
89 return CompressedOops::decode(heap_oop);
90 }
240 }
241
242 bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
243 if (type != REF_SOFT) {
244 // Not a SoftReference
245 return false;
246 }
247
248 // Ask SoftReference policy
249 const jlong clock = java_lang_ref_SoftReference::clock();
250 assert(clock != 0, "Clock not initialized");
251 assert(_soft_reference_policy != nullptr, "Policy not initialized");
252 return !_soft_reference_policy->should_clear_reference(reference, clock);
253 }
254
255 template <typename T>
256 bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
257 T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
258 T heap_oop = RawAccess<>::oop_load(referent_addr);
259 oop referent = CompressedOops::decode(heap_oop);
260
261 if (is_inactive<T>(reference, referent, type)) {
262 log_trace(gc,ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
263 return false;
264 }
265
266 if (is_strongly_live(referent)) {
267 log_trace(gc,ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
268 return false;
269 }
270
271 if (is_softly_live(reference, type)) {
272 log_trace(gc,ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
273 return false;
274 }
275
276 return true;
277 }
278
279 template <typename T>
280 bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
281 const oop referent = reference_referent<T>(reference);
282 if (referent == nullptr) {
283 // Reference has been cleared, by a call to Reference.enqueue()
284 // or Reference.clear() from the application, which means we
285 // should drop the reference.
286 return true;
287 }
288
289 // Check if the referent is still alive, in which case we should
290 // drop the reference.
291 if (type == REF_PHANTOM) {
292 return ShenandoahHeap::heap()->complete_marking_context()->is_marked(referent);
293 } else {
294 return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(referent);
295 }
321 if (reference_discovered<T>(reference) != nullptr) {
322 // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
323 // in which case it will be seen 2x by marking.
324 log_trace(gc,ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
325 return true;
326 }
327
328 if (type == REF_FINAL) {
329 ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
330 bool weak = cl->is_weak();
331 cl->set_weak(true);
332 if (UseCompressedOops) {
333 cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
334 } else {
335 cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
336 }
337 cl->set_weak(weak);
338 }
339
340 // Add reference to discovered list
341 ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
342 oop discovered_head = refproc_data.discovered_list_head<T>();
343 if (discovered_head == nullptr) {
344 // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
345 // discovered field: if it is null, then it is not-yet discovered, otherwise it is discovered
346 discovered_head = reference;
347 }
348 if (reference_cas_discovered<T>(reference, discovered_head)) {
349 refproc_data.set_discovered_list_head<T>(reference);
350 assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
351 log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
352 _ref_proc_thread_locals[worker_id].inc_discovered(type);
353 }
354 return true;
355 }
356
357 bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
358 if (!RegisterReferences) {
359 // Reference processing disabled
360 return false;
361 }
362
363 log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
364 uint worker_id = WorkerThread::worker_id();
365 _ref_proc_thread_locals->inc_encountered(type);
366
367 if (UseCompressedOops) {
368 return discover<narrowOop>(reference, type, worker_id);
369 } else {
370 return discover<oop>(reference, type, worker_id);
371 }
372 }
373
374 template <typename T>
375 oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
376 log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
377
378 #ifdef ASSERT
379 oop referent = reference_referent<T>(reference);
380 assert(referent == nullptr || ShenandoahHeap::heap()->marking_context()->is_marked(referent),
381 "only drop references with alive referents");
382 #endif
383
384 // Unlink and return next in list
385 oop next = reference_discovered<T>(reference);
386 reference_set_discovered<T>(reference, nullptr);
387 return next;
388 }
389
390 template <typename T>
391 T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
392 log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
393
394 // Update statistics
395 _ref_proc_thread_locals[worker_id].inc_enqueued(type);
396
397 // Make reference inactive
398 make_inactive<T>(reference, type);
399
400 // Return next in list
401 return reference_discovered_addr<T>(reference);
402 }
403
404 template <typename T>
405 void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {;
406 log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));
418 break;
419 }
420 log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
421 const ReferenceType type = reference_type(reference);
422
423 if (should_drop<T>(reference, type)) {
424 set_oop_field(p, drop<T>(reference, type));
425 } else {
426 p = keep<T>(reference, type, worker_id);
427 }
428
429 const oop discovered = lrb(reference_discovered<T>(reference));
430 if (reference == discovered) {
431 // Reset terminating self-loop to null
432 reference_set_discovered<T>(reference, oop(nullptr));
433 break;
434 }
435 }
436
437 // Prepend discovered references to internal pending list
438 if (!CompressedOops::is_null(*list)) {
439 oop head = lrb(CompressedOops::decode_not_null(*list));
440 shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
441 oop prev = Atomic::xchg(&_pending_list, head);
442 RawAccess<>::oop_store(p, prev);
443 if (prev == nullptr) {
444 // First to prepend to list, record tail
445 _pending_list_tail = reinterpret_cast<void*>(p);
446 }
447
448 // Clear discovered list
449 set_oop_field(list, oop(nullptr));
450 }
451 }
452
453 void ShenandoahReferenceProcessor::work() {
454 // Process discovered references
455 uint max_workers = ShenandoahHeap::heap()->max_workers();
456 uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
457 while (worker_id < max_workers) {
458 if (UseCompressedOops) {
459 process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
460 } else {
461 process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
462 }
494 void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkerThreads* workers, bool concurrent) {
495
496 Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);
497
498 // Process discovered lists
499 ShenandoahReferenceProcessorTask task(phase, concurrent, this);
500 workers->run_task(&task);
501
502 // Update SoftReference clock
503 soft_reference_update_clock();
504
505 // Collect, log and trace statistics
506 collect_statistics();
507
508 enqueue_references(concurrent);
509 }
510
511 void ShenandoahReferenceProcessor::enqueue_references_locked() {
512 // Prepend internal pending list to external pending list
513 shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
514 if (UseCompressedOops) {
515 *reinterpret_cast<narrowOop*>(_pending_list_tail) = CompressedOops::encode(Universe::swap_reference_pending_list(_pending_list));
516 } else {
517 *reinterpret_cast<oop*>(_pending_list_tail) = Universe::swap_reference_pending_list(_pending_list);
518 }
519 }
520
521 void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
522 if (_pending_list == nullptr) {
523 // Nothing to enqueue
524 return;
525 }
526
527 if (!concurrent) {
528 // When called from mark-compact or degen-GC, the locking is done by the VMOperation,
529 enqueue_references_locked();
530 } else {
531 // Heap_lock protects external pending list
532 MonitorLocker ml(Heap_lock);
533
534 enqueue_references_locked();
535
536 // Notify ReferenceHandler thread
537 ml.notify_all();
538 }
539
540 // Reset internal pending list
541 _pending_list = nullptr;
542 _pending_list_tail = &_pending_list;
543 }
544
545 template<typename T>
546 void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {
|
1 /*
2 * Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
3 * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.
4 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
5 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
6 *
7 * This code is free software; you can redistribute it and/or modify it
8 * under the terms of the GNU General Public License version 2 only, as
9 * published by the Free Software Foundation.
10 *
11 * This code is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 * version 2 for more details (a copy is included in the LICENSE file that
15 * accompanied this code).
16 *
17 * You should have received a copy of the GNU General Public License version
18 * 2 along with this work; if not, write to the Free Software Foundation,
19 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
20 *
21 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
22 * or visit www.oracle.com if you need additional information or have any
23 * questions.
24 *
25 */
26
27 #include "precompiled.hpp"
28 #include "classfile/javaClasses.hpp"
29 #include "gc/shared/workerThread.hpp"
30 #include "gc/shenandoah/shenandoahGeneration.hpp"
31 #include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
32 #include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
33 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
34 #include "gc/shenandoah/shenandoahUtils.hpp"
35 #include "runtime/atomic.hpp"
36 #include "logging/log.hpp"
37
38 static ReferenceType reference_type(oop reference) {
39 return InstanceKlass::cast(reference->klass())->reference_type();
40 }
41
42 static const char* reference_type_name(ReferenceType type) {
43 switch (type) {
44 case REF_SOFT:
45 return "Soft";
46
47 case REF_WEAK:
48 return "Weak";
49
50 case REF_FINAL:
51 return "Final";
52
53 case REF_PHANTOM:
54 return "Phantom";
55
56 default:
57 ShouldNotReachHere();
58 return nullptr;
59 }
60 }
61
62 template <typename T>
63 static void card_mark_barrier(T* field, oop value) {
64 ShenandoahHeap* heap = ShenandoahHeap::heap();
65 assert(heap->is_in_or_null(value), "Should be in heap");
66 assert(ShenandoahCardBarrier, "Card-mark barrier should be on");
67 if (heap->is_in_old(field) && heap->is_in_young(value)) {
68 // For Shenandoah, each generation collects all the _referents_ that belong to the
69 // collected generation. We can end up with discovered lists that contain a mixture
70 // of old and young _references_. These references are linked together through the
71 // discovered field in java.lang.Reference. In some cases, creating or editing this
72 // list may result in the creation of _new_ old-to-young pointers which must dirty
73 // the corresponding card. Failing to do this may cause heap verification errors and
74 // lead to incorrect GC behavior.
75 heap->card_scan()->mark_card_as_dirty(reinterpret_cast<HeapWord*>(field));
76 }
77 }
78
79 template <typename T>
80 static void set_oop_field(T* field, oop value);
81
82 template <>
83 void set_oop_field<oop>(oop* field, oop value) {
84 *field = value;
85 if (ShenandoahCardBarrier) {
86 card_mark_barrier(field, value);
87 }
88 }
89
90 template <>
91 void set_oop_field<narrowOop>(narrowOop* field, oop value) {
92 *field = CompressedOops::encode(value);
93 if (ShenandoahCardBarrier) {
94 card_mark_barrier(field, value);
95 }
96 }
97
98 static oop lrb(oop obj) {
99 if (obj != nullptr && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
100 return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
101 } else {
102 return obj;
103 }
104 }
105
106 template <typename T>
107 static volatile T* reference_referent_addr(oop reference) {
108 return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
109 }
110
111 template <typename T>
112 static oop reference_referent(oop reference) {
113 T heap_oop = Atomic::load(reference_referent_addr<T>(reference));
114 return CompressedOops::decode(heap_oop);
115 }
265 }
266
267 bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
268 if (type != REF_SOFT) {
269 // Not a SoftReference
270 return false;
271 }
272
273 // Ask SoftReference policy
274 const jlong clock = java_lang_ref_SoftReference::clock();
275 assert(clock != 0, "Clock not initialized");
276 assert(_soft_reference_policy != nullptr, "Policy not initialized");
277 return !_soft_reference_policy->should_clear_reference(reference, clock);
278 }
279
280 template <typename T>
281 bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
282 T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
283 T heap_oop = RawAccess<>::oop_load(referent_addr);
284 oop referent = CompressedOops::decode(heap_oop);
285 ShenandoahHeap* heap = ShenandoahHeap::heap();
286
287 if (is_inactive<T>(reference, referent, type)) {
288 log_trace(gc,ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
289 return false;
290 }
291
292 if (is_strongly_live(referent)) {
293 log_trace(gc,ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
294 return false;
295 }
296
297 if (is_softly_live(reference, type)) {
298 log_trace(gc,ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
299 return false;
300 }
301
302 if (!heap->is_in_active_generation(referent)) {
303 log_trace(gc,ref)("Referent outside of active generation: " PTR_FORMAT, p2i(referent));
304 return false;
305 }
306
307 return true;
308 }
309
310 template <typename T>
311 bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
312 const oop referent = reference_referent<T>(reference);
313 if (referent == nullptr) {
314 // Reference has been cleared, by a call to Reference.enqueue()
315 // or Reference.clear() from the application, which means we
316 // should drop the reference.
317 return true;
318 }
319
320 // Check if the referent is still alive, in which case we should
321 // drop the reference.
322 if (type == REF_PHANTOM) {
323 return ShenandoahHeap::heap()->complete_marking_context()->is_marked(referent);
324 } else {
325 return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(referent);
326 }
352 if (reference_discovered<T>(reference) != nullptr) {
353 // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
354 // in which case it will be seen 2x by marking.
355 log_trace(gc,ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
356 return true;
357 }
358
359 if (type == REF_FINAL) {
360 ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
361 bool weak = cl->is_weak();
362 cl->set_weak(true);
363 if (UseCompressedOops) {
364 cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
365 } else {
366 cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
367 }
368 cl->set_weak(weak);
369 }
370
371 // Add reference to discovered list
372 // Each worker thread has a private copy of refproc_data, which includes a private discovered list. This means
373 // there's no risk that a different worker thread will try to manipulate my discovered list head while I'm making
374 // reference the head of my discovered list.
375 ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
376 oop discovered_head = refproc_data.discovered_list_head<T>();
377 if (discovered_head == nullptr) {
378 // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
379 // discovered field: if it is null, then it is not-yet discovered, otherwise it is discovered
380 discovered_head = reference;
381 }
382 if (reference_cas_discovered<T>(reference, discovered_head)) {
383 // We successfully set this reference object's next pointer to discovered_head. This marks reference as discovered.
384 // If reference_cas_discovered fails, that means some other worker thread took credit for discovery of this reference,
385 // and that other thread will place reference on its discovered list, so I can ignore reference.
386
387 // In case we have created an interesting pointer, mark the remembered set card as dirty.
388 ShenandoahHeap* heap = ShenandoahHeap::heap();
389 if (ShenandoahCardBarrier) {
390 T* addr = reinterpret_cast<T*>(java_lang_ref_Reference::discovered_addr_raw(reference));
391 card_mark_barrier(addr, discovered_head);
392 }
393
394 // Make the discovered_list_head point to reference.
395 refproc_data.set_discovered_list_head<T>(reference);
396 assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
397 log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
398 _ref_proc_thread_locals[worker_id].inc_discovered(type);
399 }
400 return true;
401 }
402
403 bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
404 if (!RegisterReferences) {
405 // Reference processing disabled
406 return false;
407 }
408
409 log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s, %s)",
410 p2i(reference), reference_type_name(type), ShenandoahHeap::heap()->heap_region_containing(reference)->affiliation_name());
411 uint worker_id = WorkerThread::worker_id();
412 _ref_proc_thread_locals->inc_encountered(type);
413
414 if (UseCompressedOops) {
415 return discover<narrowOop>(reference, type, worker_id);
416 } else {
417 return discover<oop>(reference, type, worker_id);
418 }
419 }
420
// Remove `reference` from the discovered list and return the next element.
// A reference is dropped when its referent has been cleared, or when the
// referent turned out to be alive (see should_drop).
template <typename T>
oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

  ShenandoahHeap* heap = ShenandoahHeap::heap();
  oop referent = reference_referent<T>(reference);
  assert(referent == nullptr || heap->marking_context()->is_marked(referent), "only drop references with alive referents");

  // Unlink and return next in list
  oop next = reference_discovered<T>(reference);
  reference_set_discovered<T>(reference, nullptr);
  // When this reference was discovered, it would not have been marked. If it ends up surviving
  // the cycle, we need to dirty the card if the reference is old and the referent is young. Note
  // that if the reference is not dropped, then its pointer to the referent will be nulled before
  // evacuation begins so card does not need to be dirtied.
  // NOTE(review): `referent` may be nullptr here (reference already cleared); this
  // relies on is_in_young(nullptr) evaluating to false — confirm against
  // ShenandoahHeap::is_in_young.
  if (heap->mode()->is_generational() && heap->is_in_old(reference) && heap->is_in_young(referent)) {
    // Note: would be sufficient to mark only the card that holds the start of this Reference object.
    heap->card_scan()->mark_range_as_dirty(cast_from_oop<HeapWord*>(reference), reference->size());
  }
  return next;
}
442
443 template <typename T>
444 T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
445 log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
446
447 // Update statistics
448 _ref_proc_thread_locals[worker_id].inc_enqueued(type);
449
450 // Make reference inactive
451 make_inactive<T>(reference, type);
452
453 // Return next in list
454 return reference_discovered_addr<T>(reference);
455 }
456
457 template <typename T>
458 void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {;
459 log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));
471 break;
472 }
473 log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
474 const ReferenceType type = reference_type(reference);
475
476 if (should_drop<T>(reference, type)) {
477 set_oop_field(p, drop<T>(reference, type));
478 } else {
479 p = keep<T>(reference, type, worker_id);
480 }
481
482 const oop discovered = lrb(reference_discovered<T>(reference));
483 if (reference == discovered) {
484 // Reset terminating self-loop to null
485 reference_set_discovered<T>(reference, oop(nullptr));
486 break;
487 }
488 }
489
490 // Prepend discovered references to internal pending list
491 // set_oop_field maintains the card mark barrier as this list is constructed.
492 if (!CompressedOops::is_null(*list)) {
493 oop head = lrb(CompressedOops::decode_not_null(*list));
494 shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
495 oop prev = Atomic::xchg(&_pending_list, head);
496 set_oop_field(p, prev);
497 if (prev == nullptr) {
498 // First to prepend to list, record tail
499 _pending_list_tail = reinterpret_cast<void*>(p);
500 }
501
502 // Clear discovered list
503 set_oop_field(list, oop(nullptr));
504 }
505 }
506
507 void ShenandoahReferenceProcessor::work() {
508 // Process discovered references
509 uint max_workers = ShenandoahHeap::heap()->max_workers();
510 uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
511 while (worker_id < max_workers) {
512 if (UseCompressedOops) {
513 process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
514 } else {
515 process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
516 }
548 void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkerThreads* workers, bool concurrent) {
549
550 Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);
551
552 // Process discovered lists
553 ShenandoahReferenceProcessorTask task(phase, concurrent, this);
554 workers->run_task(&task);
555
556 // Update SoftReference clock
557 soft_reference_update_clock();
558
559 // Collect, log and trace statistics
560 collect_statistics();
561
562 enqueue_references(concurrent);
563 }
564
565 void ShenandoahReferenceProcessor::enqueue_references_locked() {
566 // Prepend internal pending list to external pending list
567 shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
568
569 // During reference processing, we maintain a local list of references that are identified by
570 // _pending_list and _pending_list_tail. _pending_list_tail points to the next field of the last Reference object on
571 // the local list.
572 //
573 // There is also a global list of reference identified by Universe::_reference_pending_list
574
575 // The following code has the effect of:
576 // 1. Making the global Universe::_reference_pending_list point to my local list
577 // 2. Overwriting the next field of the last Reference on my local list to point at the previous head of the
578 // global Universe::_reference_pending_list
579
580 oop former_head_of_global_list = Universe::swap_reference_pending_list(_pending_list);
581 if (UseCompressedOops) {
582 set_oop_field<narrowOop>(reinterpret_cast<narrowOop*>(_pending_list_tail), former_head_of_global_list);
583 } else {
584 set_oop_field<oop>(reinterpret_cast<oop*>(_pending_list_tail), former_head_of_global_list);
585 }
586 }
587
588 void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
589 if (_pending_list == nullptr) {
590 // Nothing to enqueue
591 return;
592 }
593 if (!concurrent) {
594 // When called from mark-compact or degen-GC, the locking is done by the VMOperation,
595 enqueue_references_locked();
596 } else {
597 // Heap_lock protects external pending list
598 MonitorLocker ml(Heap_lock);
599
600 enqueue_references_locked();
601
602 // Notify ReferenceHandler thread
603 ml.notify_all();
604 }
605
606 // Reset internal pending list
607 _pending_list = nullptr;
608 _pending_list_tail = &_pending_list;
609 }
610
611 template<typename T>
612 void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {
|