1 /*
  2  * Copyright (c) 2015, 2021, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "precompiled.hpp"
 27 #include "classfile/javaClasses.hpp"
 28 #include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
 29 #include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
 30 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
 31 #include "gc/shenandoah/shenandoahUtils.hpp"
 32 #include "runtime/atomic.hpp"
 33 #include "logging/log.hpp"
 34 
// Returns the ReferenceType (REF_SOFT, REF_WEAK, REF_FINAL, REF_PHANTOM)
// of the given java.lang.ref.Reference instance, as recorded in its klass.
static ReferenceType reference_type(oop reference) {
  return InstanceKlass::cast(reference->klass())->reference_type();
}
 38 
 39 static const char* reference_type_name(ReferenceType type) {
 40   switch (type) {
 41     case REF_SOFT:
 42       return "Soft";
 43 
 44     case REF_WEAK:
 45       return "Weak";
 46 
 47     case REF_FINAL:
 48       return "Final";
 49 
 50     case REF_PHANTOM:
 51       return "Phantom";
 52 
 53     default:
 54       ShouldNotReachHere();
 55       return NULL;
 56   }
 57 }
 58 
// Store an oop into a (possibly compressed) oop slot. The narrowOop
// specialization encodes the value before storing.
template <typename T>
static void set_oop_field(T* field, oop value);

template <>
void set_oop_field<oop>(oop* field, oop value) {
  *field = value;
}

template <>
void set_oop_field<narrowOop>(narrowOop* field, oop value) {
  *field = CompressedOops::encode(value);
}
 71 
// Resolve obj through the load reference barrier, but only when obj is
// marked (i.e. known reachable); unmarked or NULL objects are returned
// unchanged. Used when walking discovered lists so we never hand out a
// from-space copy of a live reference.
static oop lrb(oop obj) {
  if (obj != NULL && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
    return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
  } else {
    return obj;
  }
}
 79 
// Raw address of the referent field, typed as a volatile (possibly
// compressed) slot because mutators may clear it concurrently.
template <typename T>
static volatile T* reference_referent_addr(oop reference) {
  return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
}
 84 
// Atomically load and decode the referent. Note: no load reference
// barrier is applied here; callers handle forwarding explicitly.
template <typename T>
static oop reference_referent(oop reference) {
  T heap_oop = Atomic::load(reference_referent_addr<T>(reference));
  return CompressedOops::decode(heap_oop);
}
 90 
// Clear the referent field; done when a non-final reference is kept
// (enqueued) so its referent can be collected.
static void reference_clear_referent(oop reference) {
  java_lang_ref_Reference::clear_referent(reference);
}
 94 
// Raw address of the discovered field, which links references into the
// per-worker discovered lists and the pending list.
template <typename T>
static T* reference_discovered_addr(oop reference) {
  return reinterpret_cast<T*>(java_lang_ref_Reference::discovered_addr_raw(reference));
}
 99 
// Load the discovered-list successor, resolved through the load reference
// barrier so list traversal never follows a stale from-space pointer.
template <typename T>
static oop reference_discovered(oop reference) {
  T heap_oop = *reference_discovered_addr<T>(reference);
  return lrb(CompressedOops::decode(heap_oop));
}
105 
// Plain (non-atomic) store into the discovered field. Safe only once the
// reference is privately owned by a single worker's list; concurrent
// first-time discovery goes through reference_cas_discovered() instead.
template <typename T>
static void reference_set_discovered(oop reference, oop discovered);

template <>
void reference_set_discovered<oop>(oop reference, oop discovered) {
  *reference_discovered_addr<oop>(reference) = discovered;
}

template <>
void reference_set_discovered<narrowOop>(oop reference, oop discovered) {
  *reference_discovered_addr<narrowOop>(reference) = CompressedOops::encode(discovered);
}
118 
// Atomically install 'discovered' into the discovered field iff it is
// still NULL. Returns false when another worker won the race and already
// discovered this reference.
template<typename T>
static bool reference_cas_discovered(oop reference, oop discovered) {
  T* addr = reinterpret_cast<T *>(java_lang_ref_Reference::discovered_addr_raw(reference));
  return ShenandoahHeap::atomic_update_oop_check(discovered, addr, NULL);
}
124 
// Raw address of the next field (used by FinalReference inactivation).
template <typename T>
static T* reference_next_addr(oop reference) {
  return reinterpret_cast<T*>(java_lang_ref_Reference::next_addr_raw(reference));
}
129 
// Load the next field, resolved through the load reference barrier.
// Non-NULL next means a FinalReference is already inactive (self-looped).
template <typename T>
static oop reference_next(oop reference) {
  T heap_oop = RawAccess<>::oop_load(reference_next_addr<T>(reference));
  return lrb(CompressedOops::decode(heap_oop));
}
135 
// Raw store into the next field; used to self-loop a FinalReference and
// thereby mark it inactive.
static void reference_set_next(oop reference, oop next) {
  java_lang_ref_Reference::set_next_raw(reference, next);
}
139 
140 static void soft_reference_update_clock() {
141   const jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
142   java_lang_ref_SoftReference::set_clock(now);
143 }
144 
// Per-worker discovery state starts with an empty discovered list and
// zeroed statistics counters.
ShenandoahRefProcThreadLocal::ShenandoahRefProcThreadLocal() :
  _discovered_list(NULL),
  _encountered_count(),
  _discovered_count(),
  _enqueued_count() {
}
151 
152 void ShenandoahRefProcThreadLocal::reset() {
153   _discovered_list = NULL;
154   _mark_closure = NULL;
155   for (uint i = 0; i < reference_type_count; i++) {
156     _encountered_count[i] = 0;
157     _discovered_count[i] = 0;
158     _enqueued_count[i] = 0;
159   }
160 }
161 
// Address of this worker's discovered-list head slot, viewed as the
// requested (possibly compressed) oop type.
template <typename T>
T* ShenandoahRefProcThreadLocal::discovered_list_addr() {
  return reinterpret_cast<T*>(&_discovered_list);
}

// Read the discovered-list head; the narrowOop flavor decodes the
// compressed slot.
template <>
oop ShenandoahRefProcThreadLocal::discovered_list_head<oop>() const {
  return *reinterpret_cast<const oop*>(&_discovered_list);
}

template <>
oop ShenandoahRefProcThreadLocal::discovered_list_head<narrowOop>() const {
  return CompressedOops::decode(*reinterpret_cast<const narrowOop*>(&_discovered_list));
}
176 
// Write the discovered-list head; the narrowOop flavor encodes the value
// into the compressed slot.
template <>
void ShenandoahRefProcThreadLocal::set_discovered_list_head<narrowOop>(oop head) {
  *discovered_list_addr<narrowOop>() = CompressedOops::encode(head);
}

template <>
void ShenandoahRefProcThreadLocal::set_discovered_list_head<oop>(oop head) {
  *discovered_list_addr<oop>() = head;
}
186 
// Allocate one ShenandoahRefProcThreadLocal per worker (C-heap, GC
// accounting) and start with an empty internal pending list whose tail
// points back at the head slot.
ShenandoahReferenceProcessor::ShenandoahReferenceProcessor(uint max_workers) :
  _soft_reference_policy(NULL),
  _ref_proc_thread_locals(NEW_C_HEAP_ARRAY(ShenandoahRefProcThreadLocal, max_workers, mtGC)),
  _pending_list(NULL),
  _pending_list_tail(&_pending_list),
  _iterate_discovered_list_id(0U),
  _stats() {
  // NEW_C_HEAP_ARRAY does not run constructors; reset() establishes the
  // initial state of each per-worker slot.
  for (size_t i = 0; i < max_workers; i++) {
    _ref_proc_thread_locals[i].reset();
  }
}
198 
199 void ShenandoahReferenceProcessor::reset_thread_locals() {
200   uint max_workers = ShenandoahHeap::heap()->max_workers();
201   for (uint i = 0; i < max_workers; i++) {
202     _ref_proc_thread_locals[i].reset();
203   }
204 }
205 
// Install the marking closure a worker uses to mark through FinalReference
// referents during discovery (see discover()).
void ShenandoahReferenceProcessor::set_mark_closure(uint worker_id, ShenandoahMarkRefsSuperClosure* mark_closure) {
  _ref_proc_thread_locals[worker_id].set_mark_closure(mark_closure);
}
209 
// Select the SoftReference clearing policy for this cycle: clear all soft
// references (e.g. under memory pressure) or keep recently-used ones per
// the LRU-max-heap policy. The policies are function-local statics so a
// stable pointer can be handed out.
void ShenandoahReferenceProcessor::set_soft_reference_policy(bool clear) {
  static AlwaysClearPolicy always_clear_policy;
  static LRUMaxHeapPolicy lru_max_heap_policy;

  if (clear) {
    log_info(gc, ref)("Clearing All SoftReferences");
    _soft_reference_policy = &always_clear_policy;
  } else {
    _soft_reference_policy = &lru_max_heap_policy;
  }

  // Let the policy sample current state (e.g. timestamps) before use.
  _soft_reference_policy->setup();
}
223 
224 template <typename T>
225 bool ShenandoahReferenceProcessor::is_inactive(oop reference, oop referent, ReferenceType type) const {
226   if (type == REF_FINAL) {
227     // A FinalReference is inactive if its next field is non-null. An application can't
228     // call enqueue() or clear() on a FinalReference.
229     return reference_next<T>(reference) != NULL;
230   } else {
231     // A non-FinalReference is inactive if the referent is null. The referent can only
232     // be null if the application called Reference.enqueue() or Reference.clear().
233     return referent == NULL;
234   }
235 }
236 
// A referent that is strongly marked is trivially reachable; such a
// reference must not be discovered.
bool ShenandoahReferenceProcessor::is_strongly_live(oop referent) const {
  return ShenandoahHeap::heap()->marking_context()->is_marked_strong(referent);
}
240 
241 bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
242   if (type != REF_SOFT) {
243     // Not a SoftReference
244     return false;
245   }
246 
247   // Ask SoftReference policy
248   const jlong clock = java_lang_ref_SoftReference::clock();
249   assert(clock != 0, "Clock not initialized");
250   assert(_soft_reference_policy != NULL, "Policy not initialized");
251   return !_soft_reference_policy->should_clear_reference(reference, clock);
252 }
253 
// Decide whether a reference encountered during marking should be put on
// a discovered list. References that are inactive, have strongly-live
// referents, or are softly live are not discovered (and will be treated
// as ordinary objects by the caller).
template <typename T>
bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
  // Raw load of the referent; resolution/forwarding is not needed for the
  // liveness queries below.
  T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
  T heap_oop = RawAccess<>::oop_load(referent_addr);
  oop referent = CompressedOops::decode(heap_oop);

  if (is_inactive<T>(reference, referent, type)) {
    log_trace(gc,ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
    return false;
  }

  if (is_strongly_live(referent)) {
    log_trace(gc,ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
    return false;
  }

  if (is_softly_live(reference, type)) {
    log_trace(gc,ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
    return false;
  }

  return true;
}
277 
// Decide, after marking has completed, whether a discovered reference
// should be dropped from its list rather than enqueued for the
// application.
template <typename T>
bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
  const oop referent = reference_referent<T>(reference);
  if (referent == NULL) {
    // Reference has been cleared, by a call to Reference.enqueue()
    // or Reference.clear() from the application, which means we
    // should drop the reference.
    return true;
  }

  // Check if the referent is still alive, in which case we should
  // drop the reference.
  if (type == REF_PHANTOM) {
    // Phantom references are dropped if the referent is reachable at all
    // (including finalizably).
    return ShenandoahHeap::heap()->complete_marking_context()->is_marked(referent);
  } else {
    // Soft/weak/final references are dropped only on strong reachability.
    return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(referent);
  }
}
296 
// Transition a kept reference to the inactive state before it is handed
// to the application via the pending list.
template <typename T>
void ShenandoahReferenceProcessor::make_inactive(oop reference, ReferenceType type) const {
  if (type == REF_FINAL) {
    // Don't clear referent. It is needed by the Finalizer thread to make the call
    // to finalize(). A FinalReference is instead made inactive by self-looping the
    // next field. An application can't call FinalReference.enqueue(), so there is
    // no race to worry about when setting the next field.
    assert(reference_next<T>(reference) == NULL, "Already inactive");
    assert(ShenandoahHeap::heap()->marking_context()->is_marked(reference_referent<T>(reference)), "only make inactive final refs with alive referents");
    reference_set_next(reference, reference);
  } else {
    // Clear referent
    reference_clear_referent(reference);
  }
}
312 
// Try to discover a reference on this worker's discovered list. Returns
// true when the reference is (or already was) discovered, which tells the
// marker not to follow the referent; false when the reference should be
// treated as an ordinary object.
template <typename T>
bool ShenandoahReferenceProcessor::discover(oop reference, ReferenceType type, uint worker_id) {
  if (!should_discover<T>(reference, type)) {
    // Not discovered
    return false;
  }

  if (reference_discovered<T>(reference) != NULL) {
    // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
    // in which case it will be seen 2x by marking.
    log_trace(gc,ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
    return true;
  }

  if (type == REF_FINAL) {
    // The referent of a FinalReference must stay alive for finalization:
    // mark through it weakly using this worker's mark closure, restoring
    // the closure's previous strength afterwards.
    ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
    bool weak = cl->is_weak();
    cl->set_weak(true);
    if (UseCompressedOops) {
      cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
    } else {
      cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
    }
    cl->set_weak(weak);
  }

  // Add reference to discovered list
  assert(worker_id != ShenandoahThreadLocalData::INVALID_WORKER_ID, "need valid worker ID");
  ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
  oop discovered_head = refproc_data.discovered_list_head<T>();
  if (discovered_head == NULL) {
    // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
    // discovered field: if it is NULL, then it is not-yet discovered, otherwise it is discovered
    discovered_head = reference;
  }
  // CAS guards against a racing worker discovering the same reference;
  // only the winner prepends it to its own list and counts it.
  if (reference_cas_discovered<T>(reference, discovered_head)) {
    refproc_data.set_discovered_list_head<T>(reference);
    assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
    log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
    _ref_proc_thread_locals[worker_id].inc_discovered(type);
  }
  return true;
}
356 
357 bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
358   if (!RegisterReferences) {
359     // Reference processing disabled
360     return false;
361   }
362 
363   log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
364   uint worker_id = ShenandoahThreadLocalData::worker_id(Thread::current());
365   _ref_proc_thread_locals->inc_encountered(type);
366 
367   if (UseCompressedOops) {
368     return discover<narrowOop>(reference, type, worker_id);
369   } else {
370     return discover<oop>(reference, type, worker_id);
371   }
372 }
373 
// Drop a reference whose referent is still alive (or already cleared):
// unlink it from the discovered list and return its successor.
template <typename T>
oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

#ifdef ASSERT
  oop referent = reference_referent<T>(reference);
  assert(referent == NULL || ShenandoahHeap::heap()->marking_context()->is_marked(referent),
         "only drop references with alive referents");
#endif

  // Unlink and return next in list
  oop next = reference_discovered<T>(reference);
  // Clearing the discovered field re-arms the reference for future
  // discovery cycles.
  reference_set_discovered<T>(reference, NULL);
  return next;
}
389 
// Keep a reference on the list for enqueueing: make it inactive, count
// it, and return the address of its discovered field (the slot holding
// the next list element) so the caller can continue the traversal.
template <typename T>
T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
  log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

  // Update statistics
  _ref_proc_thread_locals[worker_id].inc_enqueued(type);

  // Make reference inactive
  make_inactive<T>(reference, type);

  // Return next in list
  return reference_discovered_addr<T>(reference);
}
403 
404 template <typename T>
405 void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {;
406   log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));
407   T* list = refproc_data.discovered_list_addr<T>();
408   // The list head is basically a GC root, we need to resolve and update it,
409   // otherwise we will later swap a from-space ref into Universe::pending_list().
410   if (!CompressedOops::is_null(*list)) {
411     oop first_resolved = lrb(CompressedOops::decode_not_null(*list));
412     set_oop_field(list, first_resolved);
413   }
414   T* p = list;
415   while (true) {
416     const oop reference = lrb(CompressedOops::decode(*p));
417     if (reference == NULL) {
418       break;
419     }
420     log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
421     const ReferenceType type = reference_type(reference);
422 
423     if (should_drop<T>(reference, type)) {
424       set_oop_field(p, drop<T>(reference, type));
425     } else {
426       p = keep<T>(reference, type, worker_id);
427     }
428 
429     const oop discovered = lrb(reference_discovered<T>(reference));
430     if (reference == discovered) {
431       // Reset terminating self-loop to NULL
432       reference_set_discovered<T>(reference, oop(NULL));
433       break;
434     }
435   }
436 
437   // Prepend discovered references to internal pending list
438   if (!CompressedOops::is_null(*list)) {
439     oop head = lrb(CompressedOops::decode_not_null(*list));
440     shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
441     oop prev = Atomic::xchg(&_pending_list, head);
442     RawAccess<>::oop_store(p, prev);
443     if (prev == NULL) {
444       // First to prepend to list, record tail
445       _pending_list_tail = reinterpret_cast<void*>(p);
446     }
447 
448     // Clear discovered list
449     set_oop_field(list, oop(NULL));
450   }
451 }
452 
453 void ShenandoahReferenceProcessor::work() {
454   // Process discovered references
455   uint max_workers = ShenandoahHeap::heap()->max_workers();
456   uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U) - 1;
457   while (worker_id < max_workers) {
458     if (UseCompressedOops) {
459       process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
460     } else {
461       process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
462     }
463     worker_id = Atomic::add(&_iterate_discovered_list_id, 1U) - 1;
464   }
465 }
466 
// Gang task that drives ShenandoahReferenceProcessor::work() on all
// workers, wrapped in the appropriate worker session and timing tracker
// for concurrent vs. stop-the-world (parallel) execution.
class ShenandoahReferenceProcessorTask : public AbstractGangTask {
private:
  bool const                          _concurrent;
  ShenandoahPhaseTimings::Phase const _phase;
  ShenandoahReferenceProcessor* const _reference_processor;

public:
  ShenandoahReferenceProcessorTask(ShenandoahPhaseTimings::Phase phase, bool concurrent, ShenandoahReferenceProcessor* reference_processor) :
    AbstractGangTask("ShenandoahReferenceProcessorTask"),
    _concurrent(concurrent),
    _phase(phase),
    _reference_processor(reference_processor) {
  }

  virtual void work(uint worker_id) {
    // The two branches differ only in the RAII session type; the session
    // object must live for the duration of the work() call, so the
    // duplication cannot be folded into one path.
    if (_concurrent) {
      ShenandoahConcurrentWorkerSession worker_session(worker_id);
      ShenandoahWorkerTimingsTracker x(_phase, ShenandoahPhaseTimings::WeakRefProc, worker_id);
      _reference_processor->work();
    } else {
      ShenandoahParallelWorkerSession worker_session(worker_id);
      ShenandoahWorkerTimingsTracker x(_phase, ShenandoahPhaseTimings::WeakRefProc, worker_id);
      _reference_processor->work();
    }
  }
};
493 
// Main reference-processing driver: process all discovered lists with the
// given work gang, update the SoftReference clock, gather statistics, and
// enqueue surviving references onto the external pending list.
void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkGang* workers, bool concurrent) {

  // Reset the list-claiming counter before workers start pulling from it.
  Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);

  // Process discovered lists
  ShenandoahReferenceProcessorTask task(phase, concurrent, this);
  workers->run_task(&task);

  // Update SoftReference clock
  soft_reference_update_clock();

  // Collect, log and trace statistics
  collect_statistics();

  enqueue_references(concurrent);
}
510 
// Splice the internal pending list in front of the JDK-global pending
// list: swap our head into Universe's list and link the old global head
// after our tail slot. Caller must hold whatever protects the external
// pending list (Heap_lock or a VM operation).
void ShenandoahReferenceProcessor::enqueue_references_locked() {
  // Prepend internal pending list to external pending list
  shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
  if (UseCompressedOops) {
    *reinterpret_cast<narrowOop*>(_pending_list_tail) = CompressedOops::encode(Universe::swap_reference_pending_list(_pending_list));
  } else {
    *reinterpret_cast<oop*>(_pending_list_tail) = Universe::swap_reference_pending_list(_pending_list);
  }
}
520 
// Hand the internal pending list to the JDK's reference handling
// machinery, then reset the internal list to empty.
void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
  if (_pending_list == NULL) {
    // Nothing to enqueue
    return;
  }

  if (!concurrent) {
    // When called from mark-compact or degen-GC, the locking is done by the VMOperation,
    enqueue_references_locked();
  } else {
    // Heap_lock protects external pending list
    MonitorLocker ml(Heap_lock);

    enqueue_references_locked();

    // Notify ReferenceHandler thread
    ml.notify_all();
  }

  // Reset internal pending list
  _pending_list = NULL;
  _pending_list_tail = &_pending_list;
}
544 
// Walk a discovered list and NULL out every discovered field, un-linking
// all references so they can be discovered again in a later cycle.
template<typename T>
void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {
  T discovered = *list;
  while (!CompressedOops::is_null(discovered)) {
    oop discovered_ref = CompressedOops::decode_not_null(discovered);
    // Clear the current slot, then follow the link we just read.
    set_oop_field<T>(list, oop(NULL));
    list = reference_discovered_addr<T>(discovered_ref);
    discovered = *list;
  }
}
555 
// Abandon all discovery state (e.g. when a cycle is cancelled): clear
// every worker's discovered list and any not-yet-enqueued internal
// pending list, so no reference stays half-discovered.
void ShenandoahReferenceProcessor::abandon_partial_discovery() {
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  for (uint index = 0; index < max_workers; index++) {
    if (UseCompressedOops) {
      clean_discovered_list<narrowOop>(_ref_proc_thread_locals[index].discovered_list_addr<narrowOop>());
    } else {
      clean_discovered_list<oop>(_ref_proc_thread_locals[index].discovered_list_addr<oop>());
    }
  }
  if (_pending_list != NULL) {
    oop pending = _pending_list;
    _pending_list = NULL;
    // The pending list is chained through the discovered fields as well;
    // clean it starting from the old head's discovered slot.
    if (UseCompressedOops) {
      narrowOop* list = reference_discovered_addr<narrowOop>(pending);
      clean_discovered_list<narrowOop>(list);
    } else {
      oop* list = reference_discovered_addr<oop>(pending);
      clean_discovered_list<oop>(list);
    }
  }
  _pending_list_tail = &_pending_list;
}
578 
579 void ShenandoahReferenceProcessor::collect_statistics() {
580   Counters encountered = {};
581   Counters discovered = {};
582   Counters enqueued = {};
583   uint max_workers = ShenandoahHeap::heap()->max_workers();
584   for (uint i = 0; i < max_workers; i++) {
585     for (size_t type = 0; type < reference_type_count; type++) {
586       encountered[type] += _ref_proc_thread_locals[i].encountered((ReferenceType)type);
587       discovered[type] += _ref_proc_thread_locals[i].discovered((ReferenceType)type);
588       enqueued[type] += _ref_proc_thread_locals[i].enqueued((ReferenceType)type);
589     }
590   }
591 
592   _stats = ReferenceProcessorStats(discovered[REF_SOFT],
593                                    discovered[REF_WEAK],
594                                    discovered[REF_FINAL],
595                                    discovered[REF_PHANTOM]);
596 
597   log_info(gc,ref)("Encountered references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
598                    encountered[REF_SOFT], encountered[REF_WEAK], encountered[REF_FINAL], encountered[REF_PHANTOM]);
599   log_info(gc,ref)("Discovered  references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
600                    discovered[REF_SOFT], discovered[REF_WEAK], discovered[REF_FINAL], discovered[REF_PHANTOM]);
601   log_info(gc,ref)("Enqueued    references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
602                    enqueued[REF_SOFT], enqueued[REF_WEAK], enqueued[REF_FINAL], enqueued[REF_PHANTOM]);
603 }
604