/*
 * Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.
 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "gc/shared/workerThread.hpp"
#include "gc/shenandoah/shenandoahGeneration.hpp"
#include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
#include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
#include "gc/shenandoah/shenandoahThreadLocalData.hpp"
#include "gc/shenandoah/shenandoahUtils.hpp"
#include "logging/log.hpp"
#include "runtime/atomic.hpp"

static ReferenceType reference_type(oop reference) {
  return InstanceKlass::cast(reference->klass())->reference_type();
}

static const char* reference_type_name(ReferenceType type) {
  switch (type) {
    case REF_SOFT:
      return "Soft";

    case REF_WEAK:
      return "Weak";

    case REF_FINAL:
      return "Final";

    case REF_PHANTOM:
      return "Phantom";

    default:
      ShouldNotReachHere();
      return nullptr;
  }
}

template <typename T>
static void card_mark_barrier(T* field, oop value) {
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  assert(heap->is_in_or_null(value), "Should be in heap");
  if (heap->mode()->is_generational() && heap->is_in_old(field) && heap->is_in_young(value)) {
    // For Shenandoah, each generation collects all the _referents_ that belong to the
    // collected generation. We can end up with discovered lists that contain a mixture
    // of old and young _references_. These references are linked together through the
    // discovered field in java.lang.Reference. In some cases, creating or editing this
    // list may result in the creation of _new_ old-to-young pointers which must dirty
    // the corresponding card. Failing to do this may cause heap verification errors and
    // lead to incorrect GC behavior.
    heap->card_scan()->mark_card_as_dirty(reinterpret_cast<HeapWord*>(field));
  }
}
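
// Illustrative example (hypothetical objects R and S): if Reference R lives in an
// old region and we store a young Reference S into R's discovered field, the store
// creates a new old-to-young pointer, so the card covering &R.discovered is dirtied:
//
//   set_oop_field(reference_discovered_addr<oop>(R), S);  // barrier dirties the card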

template <typename T>
static void set_oop_field(T* field, oop value);

template <>
void set_oop_field<oop>(oop* field, oop value) {
  *field = value;
  card_mark_barrier(field, value);
}

template <>
void set_oop_field<narrowOop>(narrowOop* field, oop value) {
  *field = CompressedOops::encode(value);
  card_mark_barrier(field, value);
}

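// Resolve an object through the load reference barrier: during concurrent GC, a
// marked object may have been evacuated, and the barrier returns the canonical
// to-space copy. Unmarked objects are returned as-is, since they will not be
// evacuated and so have no to-space copy to resolve to.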
static oop lrb(oop obj) {
  if (obj != nullptr && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
    return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
  } else {
    return obj;
  }
}

template <typename T>
static volatile T* reference_referent_addr(oop reference) {
  return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
}

template <typename T>
static oop reference_referent(oop reference) {
  T heap_oop = Atomic::load(reference_referent_addr<T>(reference));
  return CompressedOops::decode(heap_oop);
}

static void reference_clear_referent(oop reference) {
  java_lang_ref_Reference::clear_referent_raw(reference);
}

template <typename T>
static T* reference_discovered_addr(oop reference) {
  return reinterpret_cast<T*>(java_lang_ref_Reference::discovered_addr_raw(reference));
}

template <typename T>
static oop reference_discovered(oop reference) {
  T heap_oop = *reference_discovered_addr<T>(reference);
  return lrb(CompressedOops::decode(heap_oop));
}

template <typename T>
static void reference_set_discovered(oop reference, oop discovered);

template <>
void reference_set_discovered<oop>(oop reference, oop discovered) {
  *reference_discovered_addr<oop>(reference) = discovered;
}

template <>
void reference_set_discovered<narrowOop>(oop reference, oop discovered) {
  *reference_discovered_addr<narrowOop>(reference) = CompressedOops::encode(discovered);
}

template <typename T>
static bool reference_cas_discovered(oop reference, oop discovered) {
  T* addr = reinterpret_cast<T*>(java_lang_ref_Reference::discovered_addr_raw(reference));
  return ShenandoahHeap::atomic_update_oop_check(discovered, addr, nullptr);
}
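
// Discovery can race: several marking workers may encounter the same Reference
// concurrently. The CAS in reference_cas_discovered() succeeds only while the
// discovered field is still null, so exactly one worker gets to link the
// Reference onto its thread-local discovered list (see discover() below).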

template <typename T>
static T* reference_next_addr(oop reference) {
  return reinterpret_cast<T*>(java_lang_ref_Reference::next_addr_raw(reference));
}

template <typename T>
static oop reference_next(oop reference) {
  T heap_oop = RawAccess<>::oop_load(reference_next_addr<T>(reference));
  return lrb(CompressedOops::decode(heap_oop));
}

static void reference_set_next(oop reference, oop next) {
  java_lang_ref_Reference::set_next_raw(reference, next);
}

static void soft_reference_update_clock() {
  const jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
  java_lang_ref_SoftReference::set_clock(now);
}
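
// The division above converts to milliseconds, the unit of the clock field in
// java.lang.ref.SoftReference. Advancing the clock at the end of reference
// processing is what "ages" SoftReferences: the LRU policy compares each
// reference's last-accessed timestamp against this clock.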

ShenandoahRefProcThreadLocal::ShenandoahRefProcThreadLocal() :
  _discovered_list(nullptr),
  _encountered_count(),
  _discovered_count(),
  _enqueued_count() {
}

void ShenandoahRefProcThreadLocal::reset() {
  _discovered_list = nullptr;
  _mark_closure = nullptr;
  for (uint i = 0; i < reference_type_count; i++) {
    _encountered_count[i] = 0;
    _discovered_count[i] = 0;
    _enqueued_count[i] = 0;
  }
}

template <typename T>
T* ShenandoahRefProcThreadLocal::discovered_list_addr() {
  return reinterpret_cast<T*>(&_discovered_list);
}

template <>
oop ShenandoahRefProcThreadLocal::discovered_list_head<oop>() const {
  return *reinterpret_cast<const oop*>(&_discovered_list);
}

template <>
oop ShenandoahRefProcThreadLocal::discovered_list_head<narrowOop>() const {
  return CompressedOops::decode(*reinterpret_cast<const narrowOop*>(&_discovered_list));
}

template <>
void ShenandoahRefProcThreadLocal::set_discovered_list_head<narrowOop>(oop head) {
  *discovered_list_addr<narrowOop>() = CompressedOops::encode(head);
}

template <>
void ShenandoahRefProcThreadLocal::set_discovered_list_head<oop>(oop head) {
  *discovered_list_addr<oop>() = head;
}
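
// These specializations view the single _discovered_list slot as either an oop
// or a narrowOop, depending on which template instantiation the caller picks.
// The choice must be consistent: throughout this file, UseCompressedOops selects
// the instantiation, so a list is always read the same way it was written.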

ShenandoahReferenceProcessor::ShenandoahReferenceProcessor(uint max_workers) :
  _soft_reference_policy(nullptr),
  _ref_proc_thread_locals(NEW_C_HEAP_ARRAY(ShenandoahRefProcThreadLocal, max_workers, mtGC)),
  _pending_list(nullptr),
  _pending_list_tail(&_pending_list),
  _iterate_discovered_list_id(0U),
  _stats() {
  for (size_t i = 0; i < max_workers; i++) {
    _ref_proc_thread_locals[i].reset();
  }
}

void ShenandoahReferenceProcessor::reset_thread_locals() {
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  for (uint i = 0; i < max_workers; i++) {
    _ref_proc_thread_locals[i].reset();
  }
}

void ShenandoahReferenceProcessor::set_mark_closure(uint worker_id, ShenandoahMarkRefsSuperClosure* mark_closure) {
  _ref_proc_thread_locals[worker_id].set_mark_closure(mark_closure);
}

void ShenandoahReferenceProcessor::set_soft_reference_policy(bool clear) {
  static AlwaysClearPolicy always_clear_policy;
  static LRUMaxHeapPolicy lru_max_heap_policy;

  if (clear) {
    log_info(gc, ref)("Clearing All SoftReferences");
    _soft_reference_policy = &always_clear_policy;
  } else {
    _soft_reference_policy = &lru_max_heap_policy;
  }

  _soft_reference_policy->setup();
}

template <typename T>
bool ShenandoahReferenceProcessor::is_inactive(oop reference, oop referent, ReferenceType type) const {
  if (type == REF_FINAL) {
    // A FinalReference is inactive if its next field is non-null. An application can't
    // call enqueue() or clear() on a FinalReference.
    return reference_next<T>(reference) != nullptr;
  } else {
    // A non-FinalReference is inactive if the referent is null. The referent can only
    // be null if the application called Reference.enqueue() or Reference.clear().
    return referent == nullptr;
  }
}

bool ShenandoahReferenceProcessor::is_strongly_live(oop referent) const {
  return ShenandoahHeap::heap()->marking_context()->is_marked_strong(referent);
}

bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
  if (type != REF_SOFT) {
    // Not a SoftReference
    return false;
  }

  // Ask SoftReference policy
  const jlong clock = java_lang_ref_SoftReference::clock();
  assert(clock != 0, "Clock not initialized");
  assert(_soft_reference_policy != nullptr, "Policy not initialized");
  return !_soft_reference_policy->should_clear_reference(reference, clock);
}
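
// A sketch of the policy decision (types from gc/shared/referencePolicy.hpp):
// AlwaysClearPolicy::should_clear_reference() always answers true, so nothing is
// softly live when clearing is forced, while LRUMaxHeapPolicy keeps a
// SoftReference alive as long as (clock - timestamp) is small relative to the
// interval it derives from available heap.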

template <typename T>
bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
  T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
  T heap_oop = RawAccess<>::oop_load(referent_addr);
  oop referent = CompressedOops::decode(heap_oop);
  ShenandoahHeap* heap = ShenandoahHeap::heap();

  if (is_inactive<T>(reference, referent, type)) {
    log_trace(gc, ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
    return false;
  }

  if (is_strongly_live(referent)) {
    log_trace(gc, ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
    return false;
  }

  if (is_softly_live(reference, type)) {
    log_trace(gc, ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
    return false;
  }

  if (!heap->is_in_active_generation(referent)) {
    log_trace(gc, ref)("Referent outside of active generation: " PTR_FORMAT, p2i(referent));
    return false;
  }

  return true;
}

template <typename T>
bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
  const oop referent = reference_referent<T>(reference);
  if (referent == nullptr) {
    // Reference has been cleared by a call to Reference.enqueue()
    // or Reference.clear() from the application, which means we
    // should drop the reference.
    return true;
  }

  // Check if the referent is still alive, in which case we should
  // drop the reference.
  if (type == REF_PHANTOM) {
    return ShenandoahHeap::heap()->complete_marking_context()->is_marked(referent);
  } else {
    return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(referent);
  }
}
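
// Note the asymmetry: a PhantomReference is dropped if its referent is marked at
// all (even just finalizable), because phantoms must not be enqueued until the
// referent is truly unreachable. The other types only consider strong marking,
// so a merely finalizable referent does not keep them off the pending list.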

template <typename T>
void ShenandoahReferenceProcessor::make_inactive(oop reference, ReferenceType type) const {
  if (type == REF_FINAL) {
    // Don't clear referent. It is needed by the Finalizer thread to make the call
    // to finalize(). A FinalReference is instead made inactive by self-looping the
    // next field. An application can't call FinalReference.enqueue(), so there is
    // no race to worry about when setting the next field.
    assert(reference_next<T>(reference) == nullptr, "Already inactive");
    assert(ShenandoahHeap::heap()->marking_context()->is_marked(reference_referent<T>(reference)), "only make inactive final refs with alive referents");
    reference_set_next(reference, reference);
  } else {
    // Clear referent
    reference_clear_referent(reference);
  }
}
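
// Resulting states, illustratively:
//   WeakReference  W: W.referent = null   (cleared; inactive)
//   FinalReference F: F.next = F          (self-looped; inactive, referent kept
//                                          alive for the Finalizer thread)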

template <typename T>
bool ShenandoahReferenceProcessor::discover(oop reference, ReferenceType type, uint worker_id) {
  if (!should_discover<T>(reference, type)) {
    // Not discovered
    return false;
  }

  if (reference_discovered<T>(reference) != nullptr) {
    // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
    // in which case it will be seen 2x by marking.
    log_trace(gc, ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
    return true;
  }

  if (type == REF_FINAL) {
    ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
    bool weak = cl->is_weak();
    cl->set_weak(true);
    if (UseCompressedOops) {
      cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
    } else {
      cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
    }
    cl->set_weak(weak);
  }

  // Add reference to discovered list
  ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
  oop discovered_head = refproc_data.discovered_list_head<T>();
  if (discovered_head == nullptr) {
    // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
    // discovered field: if it is null, then it is not-yet discovered, otherwise it is discovered.
    discovered_head = reference;
  }
  if (reference_cas_discovered<T>(reference, discovered_head)) {
    refproc_data.set_discovered_list_head<T>(reference);
    assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
    log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
    _ref_proc_thread_locals[worker_id].inc_discovered(type);
  }
  return true;
}
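
// Shape of a per-worker discovered list after discovering R1, then R2, then R3
// (illustrative):
//
//   _discovered_list -> R3 -> R2 -> R1 -> R1 (tail self-loops)
//
// The self-loop lets a null discovered field mean "not yet discovered" while
// still terminating the list unambiguously.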

bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
  if (!RegisterReferences) {
    // Reference processing disabled
    return false;
  }

  log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s, %s)",
          p2i(reference), reference_type_name(type), ShenandoahHeap::heap()->heap_region_containing(reference)->affiliation_name());
  uint worker_id = WorkerThread::worker_id();
  _ref_proc_thread_locals[worker_id].inc_encountered(type);

  if (UseCompressedOops) {
    return discover<narrowOop>(reference, type, worker_id);
  } else {
    return discover<oop>(reference, type, worker_id);
  }
}

template <typename T>
oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

  ShenandoahHeap* heap = ShenandoahHeap::heap();
  oop referent = reference_referent<T>(reference);
  assert(referent == nullptr || heap->marking_context()->is_marked(referent), "only drop references with alive referents");

  // Unlink and return next in list
  oop next = reference_discovered<T>(reference);
  reference_set_discovered<T>(reference, nullptr);
  // When this reference was discovered, it would not have been marked. If it ends up surviving
  // the cycle, we need to dirty the card if the reference is old and the referent is young. Note
  // that if the reference is not dropped, then its pointer to the referent will be nulled before
  // evacuation begins, so the card does not need to be dirtied.
  if (heap->mode()->is_generational() && heap->is_in_old(reference) && heap->is_in_young(referent)) {
    // Note: it would be sufficient to mark only the card that holds the start of this Reference object.
    heap->card_scan()->mark_range_as_dirty(cast_from_oop<HeapWord*>(reference), reference->size());
  }
  return next;
}

template <typename T>
T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
  log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

  // Update statistics
  _ref_proc_thread_locals[worker_id].inc_enqueued(type);

  // Make reference inactive
  make_inactive<T>(reference, type);

  // Return next in list
  return reference_discovered_addr<T>(reference);
}
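
// keep() hands back the address of the kept Reference's discovered field, so the
// caller's cursor p ends up pointing into the last surviving Reference. If the
// next Reference is then dropped, its successor is stored through p, splicing
// the dropped Reference out of the list in place.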

template <typename T>
void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {
  log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));
  T* list = refproc_data.discovered_list_addr<T>();
  // The list head is basically a GC root; we need to resolve and update it,
  // otherwise we will later swap a from-space ref into Universe::pending_list().
  if (!CompressedOops::is_null(*list)) {
    oop first_resolved = lrb(CompressedOops::decode_not_null(*list));
    set_oop_field(list, first_resolved);
  }
  T* p = list;
  while (true) {
    const oop reference = lrb(CompressedOops::decode(*p));
    if (reference == nullptr) {
      break;
    }
    log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
    const ReferenceType type = reference_type(reference);

    if (should_drop<T>(reference, type)) {
      set_oop_field(p, drop<T>(reference, type));
    } else {
      p = keep<T>(reference, type, worker_id);
    }

    const oop discovered = lrb(reference_discovered<T>(reference));
    if (reference == discovered) {
      // Reset terminating self-loop to null
      reference_set_discovered<T>(reference, oop(nullptr));
      break;
    }
  }

  // Prepend discovered references to internal pending list
  if (!CompressedOops::is_null(*list)) {
    oop head = lrb(CompressedOops::decode_not_null(*list));
    shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
    oop prev = Atomic::xchg(&_pending_list, head);
    RawAccess<>::oop_store(p, prev);
    if (prev == nullptr) {
      // First to prepend to list, record tail
      _pending_list_tail = reinterpret_cast<void*>(p);
    }

    // Clear discovered list
    set_oop_field(list, oop(nullptr));
  }
}
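
// Sketch of the prepend above, after the tail's self-loop has been reset to null
// (illustrative):
//
//   this worker's list:  W1 -> W2 -> null        (p points at W2's discovered slot)
//   _pending_list:       P1 -> P2
//   after xchg + store:  _pending_list = W1 -> W2 -> P1 -> P2
//
// The Atomic::xchg makes concurrent prepends by other workers safe.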

void ShenandoahReferenceProcessor::work() {
  // Process discovered references
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
  while (worker_id < max_workers) {
    if (UseCompressedOops) {
      process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
    } else {
      process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
    }
    worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
  }
}
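
// _iterate_discovered_list_id is a shared claim counter: each worker atomically
// claims the index of the next unprocessed discovered list. Every list is thus
// processed exactly once even if fewer workers run here than ran during marking,
// and a worker may well process a list that some other worker built.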
499 
500 class ShenandoahReferenceProcessorTask : public WorkerTask {
501 private:
502   bool const                          _concurrent;
503   ShenandoahPhaseTimings::Phase const _phase;
504   ShenandoahReferenceProcessor* const _reference_processor;
505 
506 public:
507   ShenandoahReferenceProcessorTask(ShenandoahPhaseTimings::Phase phase, bool concurrent, ShenandoahReferenceProcessor* reference_processor) :
508     WorkerTask("ShenandoahReferenceProcessorTask"),
509     _concurrent(concurrent),
510     _phase(phase),
511     _reference_processor(reference_processor) {
512   }
513 
514   virtual void work(uint worker_id) {
515     if (_concurrent) {
516       ShenandoahConcurrentWorkerSession worker_session(worker_id);
517       ShenandoahWorkerTimingsTracker x(_phase, ShenandoahPhaseTimings::WeakRefProc, worker_id);
518       _reference_processor->work();
519     } else {
520       ShenandoahParallelWorkerSession worker_session(worker_id);
521       ShenandoahWorkerTimingsTracker x(_phase, ShenandoahPhaseTimings::WeakRefProc, worker_id);
522       _reference_processor->work();
523     }
524   }
525 };
526 
527 void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkerThreads* workers, bool concurrent) {
528 
529   Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);
530 
531   // Process discovered lists
532   ShenandoahReferenceProcessorTask task(phase, concurrent, this);
533   workers->run_task(&task);
534 
535   // Update SoftReference clock
536   soft_reference_update_clock();
537 
538   // Collect, log and trace statistics
539   collect_statistics();
540 
541   enqueue_references(concurrent);
542 }
543 
544 void ShenandoahReferenceProcessor::enqueue_references_locked() {
545   // Prepend internal pending list to external pending list
546   shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
547   if (UseCompressedOops) {
548     *reinterpret_cast<narrowOop*>(_pending_list_tail) = CompressedOops::encode(Universe::swap_reference_pending_list(_pending_list));
549   } else {
550     *reinterpret_cast<oop*>(_pending_list_tail) = Universe::swap_reference_pending_list(_pending_list);
551   }
552 }
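
// Universe::swap_reference_pending_list() atomically installs our internal list
// head as the new external pending list and returns the old head; writing that
// old head through _pending_list_tail (the discovered slot of our last
// Reference) appends the previous external list behind ours. Illustratively:
//
//   internal: I1 -> I2    external: E1 -> E2
//   after:    pending list = I1 -> I2 -> E1 -> E2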

void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
  if (_pending_list == nullptr) {
    // Nothing to enqueue
    return;
  }
  if (!concurrent) {
    // When called from mark-compact or degen-GC, the locking is done by the VMOperation.
    enqueue_references_locked();
  } else {
    // Heap_lock protects external pending list
    MonitorLocker ml(Heap_lock);

    enqueue_references_locked();

    // Notify ReferenceHandler thread
    ml.notify_all();
  }

  // Reset internal pending list
  _pending_list = nullptr;
  _pending_list_tail = &_pending_list;
}

template <typename T>
void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {
  T discovered = *list;
  while (!CompressedOops::is_null(discovered)) {
    oop discovered_ref = CompressedOops::decode_not_null(discovered);
    set_oop_field<T>(list, oop(nullptr));
    list = reference_discovered_addr<T>(discovered_ref);
    discovered = *list;
  }
}

void ShenandoahReferenceProcessor::abandon_partial_discovery() {
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  for (uint index = 0; index < max_workers; index++) {
    if (UseCompressedOops) {
      clean_discovered_list<narrowOop>(_ref_proc_thread_locals[index].discovered_list_addr<narrowOop>());
    } else {
      clean_discovered_list<oop>(_ref_proc_thread_locals[index].discovered_list_addr<oop>());
    }
  }
  if (_pending_list != nullptr) {
    oop pending = _pending_list;
    _pending_list = nullptr;
    if (UseCompressedOops) {
      narrowOop* list = reference_discovered_addr<narrowOop>(pending);
      clean_discovered_list<narrowOop>(list);
    } else {
      oop* list = reference_discovered_addr<oop>(pending);
      clean_discovered_list<oop>(list);
    }
  }
  _pending_list_tail = &_pending_list;
}

void ShenandoahReferenceProcessor::collect_statistics() {
  Counters encountered = {};
  Counters discovered = {};
  Counters enqueued = {};
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  for (uint i = 0; i < max_workers; i++) {
    for (size_t type = 0; type < reference_type_count; type++) {
      encountered[type] += _ref_proc_thread_locals[i].encountered((ReferenceType)type);
      discovered[type] += _ref_proc_thread_locals[i].discovered((ReferenceType)type);
      enqueued[type] += _ref_proc_thread_locals[i].enqueued((ReferenceType)type);
    }
  }

  _stats = ReferenceProcessorStats(discovered[REF_SOFT],
                                   discovered[REF_WEAK],
                                   discovered[REF_FINAL],
                                   discovered[REF_PHANTOM]);

  log_info(gc, ref)("Encountered references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
                    encountered[REF_SOFT], encountered[REF_WEAK], encountered[REF_FINAL], encountered[REF_PHANTOM]);
  log_info(gc, ref)("Discovered  references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
                    discovered[REF_SOFT], discovered[REF_WEAK], discovered[REF_FINAL], discovered[REF_PHANTOM]);
  log_info(gc, ref)("Enqueued    references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
                    enqueued[REF_SOFT], enqueued[REF_WEAK], enqueued[REF_FINAL], enqueued[REF_PHANTOM]);
}