1 /*
  2  * Copyright (c) 2015, 2021, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "gc/shared/workerThread.hpp"
#include "gc/shenandoah/mode/shenandoahGenerationalMode.hpp"
#include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
#include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
#include "gc/shenandoah/shenandoahThreadLocalData.hpp"
#include "gc/shenandoah/shenandoahUtils.hpp"
#include "logging/log.hpp"
#include "runtime/atomic.hpp"
 36 
 37 static ReferenceType reference_type(oop reference) {
 38   return InstanceKlass::cast(reference->klass())->reference_type();
 39 }
 40 
 41 static const char* reference_type_name(ReferenceType type) {
 42   switch (type) {
 43     case REF_SOFT:
 44       return "Soft";
 45 
 46     case REF_WEAK:
 47       return "Weak";
 48 
 49     case REF_FINAL:
 50       return "Final";
 51 
 52     case REF_PHANTOM:
 53       return "Phantom";
 54 
 55     default:
 56       ShouldNotReachHere();
 57       return NULL;
 58   }
 59 }
 60 
 61 template <typename T>
 62 static void set_oop_field(T* field, oop value);
 63 
 64 template <>
 65 void set_oop_field<oop>(oop* field, oop value) {
 66   *field = value;
 67 }
 68 
 69 template <>
 70 void set_oop_field<narrowOop>(narrowOop* field, oop value) {
 71   *field = CompressedOops::encode(value);
 72 }
 73 
 74 static oop lrb(oop obj) {
 75   if (obj != NULL && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
 76     return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
 77   } else {
 78     return obj;
 79   }
 80 }
 81 
// Address of a Reference's referent field, returned as a volatile pointer so
// the compiler re-loads it on each access (the field is loaded atomically in
// reference_referent() because it may change concurrently).
template <typename T>
static volatile T* reference_referent_addr(oop reference) {
  return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
}
 86 
// Load and decode the referent of a Reference with an atomic load. Note that,
// unlike reference_discovered(), no load-reference-barrier is applied here:
// callers are expected to handle a potentially stale referent themselves.
template <typename T>
static oop reference_referent(oop reference) {
  T heap_oop = Atomic::load(reference_referent_addr<T>(reference));
  return CompressedOops::decode(heap_oop);
}
 92 
// Null out the referent field through the java.lang.ref.Reference accessor.
static void reference_clear_referent(oop reference) {
  java_lang_ref_Reference::clear_referent(reference);
}
 96 
// Address of a Reference's discovered field, typed for compressed or
// uncompressed oops.
template <typename T>
static T* reference_discovered_addr(oop reference) {
  return reinterpret_cast<T*>(java_lang_ref_Reference::discovered_addr_raw(reference));
}
101 
// Load the discovered field and resolve it through the load-reference-barrier,
// so marked objects are returned as their to-space copies.
template <typename T>
static oop reference_discovered(oop reference) {
  T heap_oop = *reference_discovered_addr<T>(reference);
  return lrb(CompressedOops::decode(heap_oop));
}
107 
// Plain (non-atomic) store into a Reference's discovered field, specialized
// for compressed and uncompressed oops. For the racy first install during
// discovery, use reference_cas_discovered() instead.
template <typename T>
static void reference_set_discovered(oop reference, oop discovered);

template <>
void reference_set_discovered<oop>(oop reference, oop discovered) {
  *reference_discovered_addr<oop>(reference) = discovered;
}

template <>
void reference_set_discovered<narrowOop>(oop reference, oop discovered) {
  *reference_discovered_addr<narrowOop>(reference) = CompressedOops::encode(discovered);
}
120 
// Atomically install 'discovered' into the reference's discovered field iff the
// field is still NULL (i.e. the reference is not yet discovered). Returns true
// on success; false means another worker won the race.
template<typename T>
static bool reference_cas_discovered(oop reference, oop discovered) {
  T* addr = reinterpret_cast<T *>(java_lang_ref_Reference::discovered_addr_raw(reference));
  return ShenandoahHeap::atomic_update_oop_check(discovered, addr, NULL);
}
126 
// Address of a Reference's next field, typed for compressed or uncompressed oops.
template <typename T>
static T* reference_next_addr(oop reference) {
  return reinterpret_cast<T*>(java_lang_ref_Reference::next_addr_raw(reference));
}
131 
// Load the next field and resolve it through the load-reference-barrier.
// Non-NULL next indicates an inactive FinalReference (see make_inactive()).
template <typename T>
static oop reference_next(oop reference) {
  T heap_oop = RawAccess<>::oop_load(reference_next_addr<T>(reference));
  return lrb(CompressedOops::decode(heap_oop));
}
137 
// Raw store to the next field; in this file only used to self-loop a
// FinalReference's next field when making it inactive.
static void reference_set_next(oop reference, oop next) {
  java_lang_ref_Reference::set_next_raw(reference, next);
}
141 
142 static void soft_reference_update_clock() {
143   const jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
144   java_lang_ref_SoftReference::set_clock(now);
145 }
146 
// Start with an empty discovered list and zero-initialized per-type counters.
ShenandoahRefProcThreadLocal::ShenandoahRefProcThreadLocal() :
  _discovered_list(NULL),
  _encountered_count(),
  _discovered_count(),
  _enqueued_count() {
}
153 
154 void ShenandoahRefProcThreadLocal::reset() {
155   _discovered_list = NULL;
156   _mark_closure = NULL;
157   for (uint i = 0; i < reference_type_count; i++) {
158     _encountered_count[i] = 0;
159     _discovered_count[i] = 0;
160     _enqueued_count[i] = 0;
161   }
162 }
163 
// The discovered list head is stored in a single oop-sized slot; this accessor
// views that slot as either an oop* or a narrowOop*, matching UseCompressedOops.
template <typename T>
T* ShenandoahRefProcThreadLocal::discovered_list_addr() {
  return reinterpret_cast<T*>(&_discovered_list);
}

// Read the list head (uncompressed storage).
template <>
oop ShenandoahRefProcThreadLocal::discovered_list_head<oop>() const {
  return *reinterpret_cast<const oop*>(&_discovered_list);
}

// Read the list head, decoding from compressed storage.
template <>
oop ShenandoahRefProcThreadLocal::discovered_list_head<narrowOop>() const {
  return CompressedOops::decode(*reinterpret_cast<const narrowOop*>(&_discovered_list));
}

// Write the list head, encoding into compressed storage.
template <>
void ShenandoahRefProcThreadLocal::set_discovered_list_head<narrowOop>(oop head) {
  *discovered_list_addr<narrowOop>() = CompressedOops::encode(head);
}

// Write the list head (uncompressed storage).
template <>
void ShenandoahRefProcThreadLocal::set_discovered_list_head<oop>(oop head) {
  *discovered_list_addr<oop>() = head;
}
188 
// Allocate one ShenandoahRefProcThreadLocal per worker on the C heap. The
// explicit reset() loop establishes a clean initial state for each slot
// (NEW_C_HEAP_ARRAY provides raw storage).
ShenandoahReferenceProcessor::ShenandoahReferenceProcessor(uint max_workers) :
  _soft_reference_policy(NULL),
  _ref_proc_thread_locals(NEW_C_HEAP_ARRAY(ShenandoahRefProcThreadLocal, max_workers, mtGC)),
  _pending_list(NULL),
  _pending_list_tail(&_pending_list),
  _iterate_discovered_list_id(0U),
  _stats() {
  for (size_t i = 0; i < max_workers; i++) {
    _ref_proc_thread_locals[i].reset();
  }
}
200 
201 void ShenandoahReferenceProcessor::reset_thread_locals() {
202   uint max_workers = ShenandoahHeap::heap()->max_workers();
203   for (uint i = 0; i < max_workers; i++) {
204     _ref_proc_thread_locals[i].reset();
205   }
206 }
207 
// Record the marking closure used by the given worker; discover() needs it to
// mark through the referent of FinalReferences.
void ShenandoahReferenceProcessor::set_mark_closure(uint worker_id, ShenandoahMarkRefsSuperClosure* mark_closure) {
  _ref_proc_thread_locals[worker_id].set_mark_closure(mark_closure);
}
211 
212 void ShenandoahReferenceProcessor::set_soft_reference_policy(bool clear) {
213   static AlwaysClearPolicy always_clear_policy;
214   static LRUMaxHeapPolicy lru_max_heap_policy;
215 
216   if (clear) {
217     log_info(gc, ref)("Clearing All SoftReferences");
218     _soft_reference_policy = &always_clear_policy;
219   } else {
220     _soft_reference_policy = &lru_max_heap_policy;
221   }
222 
223   _soft_reference_policy->setup();
224 }
225 
226 template <typename T>
227 bool ShenandoahReferenceProcessor::is_inactive(oop reference, oop referent, ReferenceType type) const {
228   if (type == REF_FINAL) {
229     // A FinalReference is inactive if its next field is non-null. An application can't
230     // call enqueue() or clear() on a FinalReference.
231     return reference_next<T>(reference) != NULL;
232   } else {
233     // A non-FinalReference is inactive if the referent is null. The referent can only
234     // be null if the application called Reference.enqueue() or Reference.clear().
235     return referent == NULL;
236   }
237 }
238 
// A referent that was marked strongly is reachable in its own right, so its
// Reference must not be discovered.
bool ShenandoahReferenceProcessor::is_strongly_live(oop referent) const {
  return ShenandoahHeap::heap()->marking_context()->is_marked_strong(referent);
}
242 
243 bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
244   if (type != REF_SOFT) {
245     // Not a SoftReference
246     return false;
247   }
248 
249   // Ask SoftReference policy
250   const jlong clock = java_lang_ref_SoftReference::clock();
251   assert(clock != 0, "Clock not initialized");
252   assert(_soft_reference_policy != NULL, "Policy not initialized");
253   return !_soft_reference_policy->should_clear_reference(reference, clock);
254 }
255 
// Decide whether a reference encountered during marking should go on a
// discovered list. Rejected are references that are already inactive, whose
// referent is strongly marked or softly live, or whose referent lies outside
// the generation currently being collected.
template <typename T>
bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
  // Raw load of the referent; inactivity is checked before any liveness query.
  T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
  T heap_oop = RawAccess<>::oop_load(referent_addr);
  oop referent = CompressedOops::decode(heap_oop);
  ShenandoahHeap* heap = ShenandoahHeap::heap();

  if (is_inactive<T>(reference, referent, type)) {
    log_trace(gc,ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
    return false;
  }

  if (is_strongly_live(referent)) {
    log_trace(gc,ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
    return false;
  }

  if (is_softly_live(reference, type)) {
    log_trace(gc,ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
    return false;
  }

  // Generational mode: only discover references whose referent is in the
  // generation being collected; others are treated as normal objects.
  if (!heap->is_in_active_generation(referent)) {
    log_trace(gc,ref)("Referent outside of active generation: " PTR_FORMAT, p2i(referent));
    return false;
  }

  return true;
}
285 
// Decide, after marking completed, whether a discovered reference should be
// dropped from the list (returns true) instead of being kept and enqueued.
template <typename T>
bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
  const oop referent = reference_referent<T>(reference);
  if (referent == NULL) {
    // Reference has been cleared, by a call to Reference.enqueue()
    // or Reference.clear() from the application, which means we
    // should drop the reference.
    return true;
  }

  // Check if the referent is still alive, in which case we should
  // drop the reference.
  if (type == REF_PHANTOM) {
    // A phantom referent is alive if it is marked at all (any strength).
    return ShenandoahHeap::heap()->complete_marking_context()->is_marked(referent);
  } else {
    // Soft/Weak/Final referents count as alive only when marked strongly.
    return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(referent);
  }
}
304 
// Transition a kept reference into the inactive state, so the application-side
// Reference machinery treats it as processed.
template <typename T>
void ShenandoahReferenceProcessor::make_inactive(oop reference, ReferenceType type) const {
  if (type == REF_FINAL) {
    // Don't clear referent. It is needed by the Finalizer thread to make the call
    // to finalize(). A FinalReference is instead made inactive by self-looping the
    // next field. An application can't call FinalReference.enqueue(), so there is
    // no race to worry about when setting the next field.
    assert(reference_next<T>(reference) == NULL, "Already inactive");
    assert(ShenandoahHeap::heap()->marking_context()->is_marked(reference_referent<T>(reference)), "only make inactive final refs with alive referents");
    reference_set_next(reference, reference);
  } else {
    // Clear referent
    reference_clear_referent(reference);
  }
}
320 
// Try to add a reference to this worker's discovered list. Returns true when the
// reference is (or already was) discovered — marking then skips its referent —
// and false when it should be treated as an ordinary object.
template <typename T>
bool ShenandoahReferenceProcessor::discover(oop reference, ReferenceType type, uint worker_id) {
  if (!should_discover<T>(reference, type)) {
    // Not discovered
    return false;
  }

  if (reference_discovered<T>(reference) != NULL) {
    // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
    // in which case it will be seen 2x by marking.
    log_trace(gc,ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
    return true;
  }

  if (type == REF_FINAL) {
    // Mark through the referent of a FinalReference (it must survive until the
    // finalizer runs), temporarily flipping the worker's closure to weak strength.
    ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
    bool weak = cl->is_weak();
    cl->set_weak(true);
    if (UseCompressedOops) {
      cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
    } else {
      cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
    }
    cl->set_weak(weak);
  }

  // Add reference to discovered list
  ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
  oop discovered_head = refproc_data.discovered_list_head<T>();
  if (discovered_head == NULL) {
    // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
    // discovered field: if it is NULL, then it is not-yet discovered, otherwise it is discovered
    discovered_head = reference;
  }
  // CAS guards against another worker discovering the same reference concurrently;
  // only the winner prepends it to its own thread-local list.
  if (reference_cas_discovered<T>(reference, discovered_head)) {
    refproc_data.set_discovered_list_head<T>(reference);
    assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
    log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
    _ref_proc_thread_locals[worker_id].inc_discovered(type);
  }
  return true;
}
363 
364 bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
365   if (!RegisterReferences) {
366     // Reference processing disabled
367     return false;
368   }
369 
370   log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s, %s)",
371           p2i(reference), reference_type_name(type), affiliation_name(reference));
372   uint worker_id = WorkerThread::worker_id();
373   _ref_proc_thread_locals->inc_encountered(type);
374 
375   if (UseCompressedOops) {
376     return discover<narrowOop>(reference, type, worker_id);
377   } else {
378     return discover<oop>(reference, type, worker_id);
379   }
380 }
381 
// Unlink a reference whose referent survived (or was cleared by the application)
// from the discovered list, and return the next list element.
template <typename T>
oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

  ShenandoahHeap* heap = ShenandoahHeap::heap();
  oop referent = reference_referent<T>(reference);
  assert(referent == NULL || heap->marking_context()->is_marked(referent), "only drop references with alive referents");

  // Unlink and return next in list
  oop next = reference_discovered<T>(reference);
  reference_set_discovered<T>(reference, NULL);
  // When this reference was discovered, it would not have been marked. If it ends up surviving
  // the cycle, we need to dirty the card if the reference is old and the referent is young.  Note
  // that if the reference is not dropped, then its pointer to the referent will be nulled before
  // evacuation begins so card does not need to be dirtied.
  if (heap->mode()->is_generational() && heap->is_in_old(reference) && heap->is_in_young(referent)) {
    // Note: would be sufficient to mark only the card that holds the start of this Reference object.
    heap->card_scan()->mark_range_as_dirty(cast_from_oop<HeapWord*>(reference), reference->size());
  }
  return next;
}
403 
// Keep a reference for enqueueing: record statistics, make the reference
// inactive, and return the address of its discovered field — the slot holding
// the next element of the list, which the caller continues scanning from.
template <typename T>
T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
  log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

  // Update statistics
  _ref_proc_thread_locals[worker_id].inc_enqueued(type);

  // Make reference inactive
  make_inactive<T>(reference, type);

  // Return next in list
  return reference_discovered_addr<T>(reference);
}
417 
418 template <typename T>
419 void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {;
420   log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));
421   T* list = refproc_data.discovered_list_addr<T>();
422   // The list head is basically a GC root, we need to resolve and update it,
423   // otherwise we will later swap a from-space ref into Universe::pending_list().
424   if (!CompressedOops::is_null(*list)) {
425     oop first_resolved = lrb(CompressedOops::decode_not_null(*list));
426     set_oop_field(list, first_resolved);
427   }
428   T* p = list;
429   while (true) {
430     const oop reference = lrb(CompressedOops::decode(*p));
431     if (reference == NULL) {
432       break;
433     }
434     log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
435     const ReferenceType type = reference_type(reference);
436 
437     if (should_drop<T>(reference, type)) {
438       set_oop_field(p, drop<T>(reference, type));
439     } else {
440       p = keep<T>(reference, type, worker_id);
441     }
442 
443     const oop discovered = lrb(reference_discovered<T>(reference));
444     if (reference == discovered) {
445       // Reset terminating self-loop to NULL
446       reference_set_discovered<T>(reference, oop(NULL));
447       break;
448     }
449   }
450 
451   // Prepend discovered references to internal pending list
452   if (!CompressedOops::is_null(*list)) {
453     oop head = lrb(CompressedOops::decode_not_null(*list));
454     shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
455     oop prev = Atomic::xchg(&_pending_list, head);
456     RawAccess<>::oop_store(p, prev);
457     if (prev == NULL) {
458       // First to prepend to list, record tail
459       _pending_list_tail = reinterpret_cast<void*>(p);
460     }
461 
462     // Clear discovered list
463     set_oop_field(list, oop(NULL));
464   }
465 }
466 
467 void ShenandoahReferenceProcessor::work() {
468   // Process discovered references
469   uint max_workers = ShenandoahHeap::heap()->max_workers();
470   uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
471   while (worker_id < max_workers) {
472     if (UseCompressedOops) {
473       process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
474     } else {
475       process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
476     }
477     worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
478   }
479 }
480 
// WorkerTask wrapper that runs ShenandoahReferenceProcessor::work() on each
// worker, under the appropriate worker-session and timing scopes for the
// concurrent vs. safepoint (parallel) case.
class ShenandoahReferenceProcessorTask : public WorkerTask {
private:
  bool const                          _concurrent;
  ShenandoahPhaseTimings::Phase const _phase;
  ShenandoahReferenceProcessor* const _reference_processor;

public:
  ShenandoahReferenceProcessorTask(ShenandoahPhaseTimings::Phase phase, bool concurrent, ShenandoahReferenceProcessor* reference_processor) :
    WorkerTask("ShenandoahReferenceProcessorTask"),
    _concurrent(concurrent),
    _phase(phase),
    _reference_processor(reference_processor) {
  }

  virtual void work(uint worker_id) {
    // The two branches differ only in the RAII session type; the session object
    // must live for the duration of the work, so the call is duplicated.
    if (_concurrent) {
      ShenandoahConcurrentWorkerSession worker_session(worker_id);
      ShenandoahWorkerTimingsTracker x(_phase, ShenandoahPhaseTimings::WeakRefProc, worker_id);
      _reference_processor->work();
    } else {
      ShenandoahParallelWorkerSession worker_session(worker_id);
      ShenandoahWorkerTimingsTracker x(_phase, ShenandoahPhaseTimings::WeakRefProc, worker_id);
      _reference_processor->work();
    }
  }
};
507 
// Drive reference processing: process all discovered lists with the given
// workers, update the SoftReference clock, publish statistics, and hand the
// surviving references to the Java-side pending list.
void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkerThreads* workers, bool concurrent) {

  // Reset the list-claim ticket; release fence publishes it before workers start.
  Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);

  // Process discovered lists
  ShenandoahReferenceProcessorTask task(phase, concurrent, this);
  workers->run_task(&task);

  // Update SoftReference clock
  soft_reference_update_clock();

  // Collect, log and trace statistics
  collect_statistics();

  enqueue_references(concurrent);
}
524 
// Splice the internal pending list in front of the JDK's external pending list
// by storing the old external head into our recorded tail slot. Callers are
// responsible for the locking protocol (see enqueue_references()).
void ShenandoahReferenceProcessor::enqueue_references_locked() {
  // Prepend internal pending list to external pending list
  shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
  if (UseCompressedOops) {
    *reinterpret_cast<narrowOop*>(_pending_list_tail) = CompressedOops::encode(Universe::swap_reference_pending_list(_pending_list));
  } else {
    *reinterpret_cast<oop*>(_pending_list_tail) = Universe::swap_reference_pending_list(_pending_list);
  }
}
534 
// Publish the internal pending list to java.lang.ref and reset internal state.
// Concurrent callers take Heap_lock and notify the ReferenceHandler thread;
// at a safepoint the VM operation already provides mutual exclusion.
void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
  if (_pending_list == NULL) {
    // Nothing to enqueue
    return;
  }
  if (!concurrent) {
    // When called from mark-compact or degen-GC, the locking is done by the VMOperation,
    enqueue_references_locked();
  } else {
    // Heap_lock protects external pending list
    MonitorLocker ml(Heap_lock);

    enqueue_references_locked();

    // Notify ReferenceHandler thread
    ml.notify_all();
  }

  // Reset internal pending list
  _pending_list = NULL;
  _pending_list_tail = &_pending_list;
}
557 
558 template<typename T>
559 void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {
560   T discovered = *list;
561   while (!CompressedOops::is_null(discovered)) {
562     oop discovered_ref = CompressedOops::decode_not_null(discovered);
563     set_oop_field<T>(list, oop(NULL));
564     list = reference_discovered_addr<T>(discovered_ref);
565     discovered = *list;
566   }
567 }
568 
// Called when a cycle is abandoned: clear every worker's discovered list and any
// references already moved to the internal pending list, leaving all Reference
// objects with a NULL discovered field (i.e. not discovered).
void ShenandoahReferenceProcessor::abandon_partial_discovery() {
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  for (uint index = 0; index < max_workers; index++) {
    if (UseCompressedOops) {
      clean_discovered_list<narrowOop>(_ref_proc_thread_locals[index].discovered_list_addr<narrowOop>());
    } else {
      clean_discovered_list<oop>(_ref_proc_thread_locals[index].discovered_list_addr<oop>());
    }
  }
  if (_pending_list != NULL) {
    // Also unlink everything already chained onto the internal pending list.
    oop pending = _pending_list;
    _pending_list = NULL;
    if (UseCompressedOops) {
      narrowOop* list = reference_discovered_addr<narrowOop>(pending);
      clean_discovered_list<narrowOop>(list);
    } else {
      oop* list = reference_discovered_addr<oop>(pending);
      clean_discovered_list<oop>(list);
    }
  }
  _pending_list_tail = &_pending_list;
}
591 
// Sum the per-worker encountered/discovered/enqueued counters, publish the
// discovered counts via _stats (consumed by GC tracing), and log totals.
void ShenandoahReferenceProcessor::collect_statistics() {
  Counters encountered = {};
  Counters discovered = {};
  Counters enqueued = {};
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  for (uint i = 0; i < max_workers; i++) {
    for (size_t type = 0; type < reference_type_count; type++) {
      encountered[type] += _ref_proc_thread_locals[i].encountered((ReferenceType)type);
      discovered[type] += _ref_proc_thread_locals[i].discovered((ReferenceType)type);
      enqueued[type] += _ref_proc_thread_locals[i].enqueued((ReferenceType)type);
    }
  }

  _stats = ReferenceProcessorStats(discovered[REF_SOFT],
                                   discovered[REF_WEAK],
                                   discovered[REF_FINAL],
                                   discovered[REF_PHANTOM]);

  log_info(gc,ref)("Encountered references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
                   encountered[REF_SOFT], encountered[REF_WEAK], encountered[REF_FINAL], encountered[REF_PHANTOM]);
  log_info(gc,ref)("Discovered  references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
                   discovered[REF_SOFT], discovered[REF_WEAK], discovered[REF_FINAL], discovered[REF_PHANTOM]);
  log_info(gc,ref)("Enqueued    references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
                   enqueued[REF_SOFT], enqueued[REF_WEAK], enqueued[REF_FINAL], enqueued[REF_PHANTOM]);
}
617