/*
 * Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "gc/shared/workerThread.hpp"
#include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
#include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
#include "gc/shenandoah/shenandoahThreadLocalData.hpp"
#include "gc/shenandoah/shenandoahUtils.hpp"
#include "logging/log.hpp"
#include "runtime/atomic.hpp"

static ReferenceType reference_type(oop reference) {
  return InstanceKlass::cast(reference->klass())->reference_type();
}

static const char* reference_type_name(ReferenceType type) {
  switch (type) {
    case REF_SOFT:
      return "Soft";

    case REF_WEAK:
      return "Weak";

    case REF_FINAL:
      return "Final";

    case REF_PHANTOM:
      return "Phantom";

    default:
      ShouldNotReachHere();
      return nullptr;
  }
}

template <typename T>
static void set_oop_field(T* field, oop value);

template <>
void set_oop_field<oop>(oop* field, oop value) {
  *field = value;
}

template <>
void set_oop_field<narrowOop>(narrowOop* field, oop value) {
  *field = CompressedOops::encode(value);
}

static oop lrb(oop obj) {
  if (obj != nullptr && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
    return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
  } else {
    return obj;
  }
}

template <typename T>
static volatile T* reference_referent_addr(oop reference) {
  return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
}

template <typename T>
static oop reference_referent(oop reference) {
  T heap_oop = Atomic::load(reference_referent_addr<T>(reference));
  return CompressedOops::decode(heap_oop);
}

static void reference_clear_referent(oop reference) {
  java_lang_ref_Reference::clear_referent_raw(reference);
}

template <typename T>
static T* reference_discovered_addr(oop reference) {
  return reinterpret_cast<T*>(java_lang_ref_Reference::discovered_addr_raw(reference));
}

template <typename T>
static oop reference_discovered(oop reference) {
  T heap_oop = *reference_discovered_addr<T>(reference);
  return lrb(CompressedOops::decode(heap_oop));
}

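// Note: the discovered field does double duty. A null discovered field means
// the reference has not been discovered yet; once discovered, the field links
// the reference into a worker-local discovered list. The tail of that list
// self-loops, so a discovered reference never has a null discovered field
// (see discover() below).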
template <typename T>
static void reference_set_discovered(oop reference, oop discovered);

template <>
void reference_set_discovered<oop>(oop reference, oop discovered) {
  *reference_discovered_addr<oop>(reference) = discovered;
}

template <>
void reference_set_discovered<narrowOop>(oop reference, oop discovered) {
  *reference_discovered_addr<narrowOop>(reference) = CompressedOops::encode(discovered);
}

template <typename T>
static bool reference_cas_discovered(oop reference, oop discovered) {
  T* addr = reinterpret_cast<T*>(java_lang_ref_Reference::discovered_addr_raw(reference));
  return ShenandoahHeap::atomic_update_oop_check(discovered, addr, nullptr);
}

template <typename T>
static T* reference_next_addr(oop reference) {
  return reinterpret_cast<T*>(java_lang_ref_Reference::next_addr_raw(reference));
}

template <typename T>
static oop reference_next(oop reference) {
  T heap_oop = RawAccess<>::oop_load(reference_next_addr<T>(reference));
  return lrb(CompressedOops::decode(heap_oop));
}

static void reference_set_next(oop reference, oop next) {
  java_lang_ref_Reference::set_next_raw(reference, next);
}

static void soft_reference_update_clock() {
  const jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
  java_lang_ref_SoftReference::set_clock(now);
}

ShenandoahRefProcThreadLocal::ShenandoahRefProcThreadLocal() :
  _discovered_list(nullptr),
  _encountered_count(),
  _discovered_count(),
  _enqueued_count() {
}

void ShenandoahRefProcThreadLocal::reset() {
  _discovered_list = nullptr;
  _mark_closure = nullptr;
  for (uint i = 0; i < reference_type_count; i++) {
    _encountered_count[i] = 0;
    _discovered_count[i] = 0;
    _enqueued_count[i] = 0;
  }
}

template <typename T>
T* ShenandoahRefProcThreadLocal::discovered_list_addr() {
  return reinterpret_cast<T*>(&_discovered_list);
}

template <>
oop ShenandoahRefProcThreadLocal::discovered_list_head<oop>() const {
  return *reinterpret_cast<const oop*>(&_discovered_list);
}

template <>
oop ShenandoahRefProcThreadLocal::discovered_list_head<narrowOop>() const {
  return CompressedOops::decode(*reinterpret_cast<const narrowOop*>(&_discovered_list));
}

template <>
void ShenandoahRefProcThreadLocal::set_discovered_list_head<narrowOop>(oop head) {
  *discovered_list_addr<narrowOop>() = CompressedOops::encode(head);
}

template <>
void ShenandoahRefProcThreadLocal::set_discovered_list_head<oop>(oop head) {
  *discovered_list_addr<oop>() = head;
}

ShenandoahReferenceProcessor::ShenandoahReferenceProcessor(uint max_workers) :
  _soft_reference_policy(nullptr),
  _ref_proc_thread_locals(NEW_C_HEAP_ARRAY(ShenandoahRefProcThreadLocal, max_workers, mtGC)),
  _pending_list(nullptr),
  _pending_list_tail(&_pending_list),
  _iterate_discovered_list_id(0U),
  _stats() {
  for (size_t i = 0; i < max_workers; i++) {
    _ref_proc_thread_locals[i].reset();
  }
}

void ShenandoahReferenceProcessor::reset_thread_locals() {
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  for (uint i = 0; i < max_workers; i++) {
    _ref_proc_thread_locals[i].reset();
  }
}

void ShenandoahReferenceProcessor::set_mark_closure(uint worker_id, ShenandoahMarkRefsSuperClosure* mark_closure) {
  _ref_proc_thread_locals[worker_id].set_mark_closure(mark_closure);
}

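// The soft reference policy is selected once per cycle: when the caller asks
// for clearing (e.g. under memory pressure), AlwaysClearPolicy discards every
// SoftReference; otherwise LRUMaxHeapPolicy keeps softly reachable objects
// alive based on how recently they were accessed relative to the heap size.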
void ShenandoahReferenceProcessor::set_soft_reference_policy(bool clear) {
  static AlwaysClearPolicy always_clear_policy;
  static LRUMaxHeapPolicy lru_max_heap_policy;

  if (clear) {
    log_info(gc, ref)("Clearing All SoftReferences");
    _soft_reference_policy = &always_clear_policy;
  } else {
    _soft_reference_policy = &lru_max_heap_policy;
  }

  _soft_reference_policy->setup();
}

template <typename T>
bool ShenandoahReferenceProcessor::is_inactive(oop reference, oop referent, ReferenceType type) const {
  if (type == REF_FINAL) {
    // A FinalReference is inactive if its next field is non-null. An application can't
    // call enqueue() or clear() on a FinalReference.
    return reference_next<T>(reference) != nullptr;
  } else {
    // A non-FinalReference is inactive if the referent is null. The referent can only
    // be null if the application called Reference.enqueue() or Reference.clear().
    return referent == nullptr;
  }
}

bool ShenandoahReferenceProcessor::is_strongly_live(oop referent) const {
  return ShenandoahHeap::heap()->marking_context()->is_marked_strong(referent);
}

bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
  if (type != REF_SOFT) {
    // Not a SoftReference
    return false;
  }

  // Ask SoftReference policy
  const jlong clock = java_lang_ref_SoftReference::clock();
  assert(clock != 0, "Clock not initialized");
  assert(_soft_reference_policy != nullptr, "Policy not initialized");
  return !_soft_reference_policy->should_clear_reference(reference, clock);
}

template <typename T>
bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
  T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
  T heap_oop = RawAccess<>::oop_load(referent_addr);
  oop referent = CompressedOops::decode(heap_oop);

  if (is_inactive<T>(reference, referent, type)) {
    log_trace(gc, ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
    return false;
  }

  if (is_strongly_live(referent)) {
    log_trace(gc, ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
    return false;
  }

  if (is_softly_live(reference, type)) {
    log_trace(gc, ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
    return false;
  }

  return true;
}

template <typename T>
bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
  const oop referent = reference_referent<T>(reference);
  if (referent == nullptr) {
    // Reference has been cleared, by a call to Reference.enqueue()
    // or Reference.clear() from the application, which means we
    // should drop the reference.
    return true;
  }

  // Check if the referent is still alive, in which case we should
  // drop the reference.
  if (type == REF_PHANTOM) {
    return ShenandoahHeap::heap()->complete_marking_context()->is_marked(referent);
  } else {
    return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(referent);
  }
}

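// A reference that survives should_drop() is kept: it is made inactive below
// and eventually handed to the Java ReferenceHandler thread via the pending
// list. A dropped reference is simply unlinked from the discovered list and
// remains usable by the application.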
template <typename T>
void ShenandoahReferenceProcessor::make_inactive(oop reference, ReferenceType type) const {
  if (type == REF_FINAL) {
    // Don't clear referent. It is needed by the Finalizer thread to make the call
    // to finalize(). A FinalReference is instead made inactive by self-looping the
    // next field. An application can't call FinalReference.enqueue(), so there is
    // no race to worry about when setting the next field.
    assert(reference_next<T>(reference) == nullptr, "Already inactive");
    assert(ShenandoahHeap::heap()->marking_context()->is_marked(reference_referent<T>(reference)), "only make inactive final refs with alive referents");
    reference_set_next(reference, reference);
  } else {
    // Clear referent
    reference_clear_referent(reference);
  }
}

template <typename T>
bool ShenandoahReferenceProcessor::discover(oop reference, ReferenceType type, uint worker_id) {
  if (!should_discover<T>(reference, type)) {
    // Not discovered
    return false;
  }

  if (reference_discovered<T>(reference) != nullptr) {
    // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
    // in which case it will be seen 2x by marking.
    log_trace(gc, ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
    return true;
  }

  if (type == REF_FINAL) {
    ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
    bool weak = cl->is_weak();
    cl->set_weak(true);
    if (UseCompressedOops) {
      cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
    } else {
      cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
    }
    cl->set_weak(weak);
  }

  // Add reference to discovered list
  ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
  oop discovered_head = refproc_data.discovered_list_head<T>();
  if (discovered_head == nullptr) {
    // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
    // discovered field: if it is null, then it is not-yet discovered, otherwise it is discovered.
    discovered_head = reference;
  }
  if (reference_cas_discovered<T>(reference, discovered_head)) {
    refproc_data.set_discovered_list_head<T>(reference);
    assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
    log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
    _ref_proc_thread_locals[worker_id].inc_discovered(type);
  }
  return true;
}

bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
  if (!RegisterReferences) {
    // Reference processing disabled
    return false;
  }

  log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
  uint worker_id = WorkerThread::worker_id();
  _ref_proc_thread_locals[worker_id].inc_encountered(type);

  if (UseCompressedOops) {
    return discover<narrowOop>(reference, type, worker_id);
  } else {
    return discover<oop>(reference, type, worker_id);
  }
}

template <typename T>
oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

#ifdef ASSERT
  oop referent = reference_referent<T>(reference);
  assert(referent == nullptr || ShenandoahHeap::heap()->marking_context()->is_marked(referent),
         "only drop references with alive referents");
#endif

  // Unlink and return next in list
  oop next = reference_discovered<T>(reference);
  reference_set_discovered<T>(reference, nullptr);
  return next;
}

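// keep() returns the address of the discovered field rather than its value:
// the caller in process_references() continues the list walk through that
// address, so a subsequent drop() can unlink the dropped reference in place.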
template <typename T>
T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
  log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

  // Update statistics
  _ref_proc_thread_locals[worker_id].inc_enqueued(type);

  // Make reference inactive
  make_inactive<T>(reference, type);

  // Return next in list
  return reference_discovered_addr<T>(reference);
}

template <typename T>
void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {
  log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));
  T* list = refproc_data.discovered_list_addr<T>();
  // The list head is basically a GC root, we need to resolve and update it,
  // otherwise we will later swap a from-space ref into Universe::pending_list().
  if (!CompressedOops::is_null(*list)) {
    oop first_resolved = lrb(CompressedOops::decode_not_null(*list));
    set_oop_field(list, first_resolved);
  }
  T* p = list;
  while (true) {
    const oop reference = lrb(CompressedOops::decode(*p));
    if (reference == nullptr) {
      break;
    }
    log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
    const ReferenceType type = reference_type(reference);

    if (should_drop<T>(reference, type)) {
      set_oop_field(p, drop<T>(reference, type));
    } else {
      p = keep<T>(reference, type, worker_id);
    }

    const oop discovered = lrb(reference_discovered<T>(reference));
    if (reference == discovered) {
      // Reset terminating self-loop to null
      reference_set_discovered<T>(reference, oop(nullptr));
      break;
    }
  }

  // Prepend discovered references to internal pending list
  if (!CompressedOops::is_null(*list)) {
    oop head = lrb(CompressedOops::decode_not_null(*list));
    shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
    oop prev = Atomic::xchg(&_pending_list, head);
    RawAccess<>::oop_store(p, prev);
    if (prev == nullptr) {
      // First to prepend to list, record tail
      _pending_list_tail = reinterpret_cast<void*>(p);
    }

    // Clear discovered list
    set_oop_field(list, oop(nullptr));
  }
}

void ShenandoahReferenceProcessor::work() {
  // Process discovered references
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
  while (worker_id < max_workers) {
    if (UseCompressedOops) {
      process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
    } else {
      process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
    }
    worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
  }
}

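// The task below runs work() on every worker. work() hands out discovered
// lists by atomically incrementing a shared ticket counter, one ticket per
// worker-local list. This decouples the number of processing workers from
// the number of workers that did the discovery, so no list is skipped or
// processed twice.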
class ShenandoahReferenceProcessorTask : public WorkerTask {
private:
  bool const                          _concurrent;
  ShenandoahPhaseTimings::Phase const _phase;
  ShenandoahReferenceProcessor* const _reference_processor;

public:
  ShenandoahReferenceProcessorTask(ShenandoahPhaseTimings::Phase phase, bool concurrent, ShenandoahReferenceProcessor* reference_processor) :
    WorkerTask("ShenandoahReferenceProcessorTask"),
    _concurrent(concurrent),
    _phase(phase),
    _reference_processor(reference_processor) {
  }

  virtual void work(uint worker_id) {
    if (_concurrent) {
      ShenandoahConcurrentWorkerSession worker_session(worker_id);
      ShenandoahWorkerTimingsTracker x(_phase, ShenandoahPhaseTimings::WeakRefProc, worker_id);
      _reference_processor->work();
    } else {
      ShenandoahParallelWorkerSession worker_session(worker_id);
      ShenandoahWorkerTimingsTracker x(_phase, ShenandoahPhaseTimings::WeakRefProc, worker_id);
      _reference_processor->work();
    }
  }
};

void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkerThreads* workers, bool concurrent) {
  Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);

  // Process discovered lists
  ShenandoahReferenceProcessorTask task(phase, concurrent, this);
  workers->run_task(&task);

  // Update SoftReference clock
  soft_reference_update_clock();

  // Collect, log and trace statistics
  collect_statistics();

  enqueue_references(concurrent);
}

void ShenandoahReferenceProcessor::enqueue_references_locked() {
  // Prepend internal pending list to external pending list
  shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
  if (UseCompressedOops) {
    *reinterpret_cast<narrowOop*>(_pending_list_tail) = CompressedOops::encode(Universe::swap_reference_pending_list(_pending_list));
  } else {
    *reinterpret_cast<oop*>(_pending_list_tail) = Universe::swap_reference_pending_list(_pending_list);
  }
}

void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
  if (_pending_list == nullptr) {
    // Nothing to enqueue
    return;
  }

  if (!concurrent) {
    // When called from mark-compact or degen-GC, the locking is done by the VMOperation.
    enqueue_references_locked();
  } else {
    // Heap_lock protects external pending list
    MonitorLocker ml(Heap_lock);

    enqueue_references_locked();

    // Notify ReferenceHandler thread
    ml.notify_all();
  }

  // Reset internal pending list
  _pending_list = nullptr;
  _pending_list_tail = &_pending_list;
}

template <typename T>
void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {
  T discovered = *list;
  while (!CompressedOops::is_null(discovered)) {
    oop discovered_ref = CompressedOops::decode_not_null(discovered);
    set_oop_field<T>(list, oop(nullptr));
    list = reference_discovered_addr<T>(discovered_ref);
    discovered = *list;
  }
}

void ShenandoahReferenceProcessor::abandon_partial_discovery() {
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  for (uint index = 0; index < max_workers; index++) {
    if (UseCompressedOops) {
      clean_discovered_list<narrowOop>(_ref_proc_thread_locals[index].discovered_list_addr<narrowOop>());
    } else {
      clean_discovered_list<oop>(_ref_proc_thread_locals[index].discovered_list_addr<oop>());
    }
  }
  if (_pending_list != nullptr) {
    oop pending = _pending_list;
    _pending_list = nullptr;
    if (UseCompressedOops) {
      narrowOop* list = reference_discovered_addr<narrowOop>(pending);
      clean_discovered_list<narrowOop>(list);
    } else {
      oop* list = reference_discovered_addr<oop>(pending);
      clean_discovered_list<oop>(list);
    }
  }
  _pending_list_tail = &_pending_list;
}

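// Statistics are accumulated per worker during discovery and processing;
// here they are summed across all thread-locals and reported once per cycle.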
void ShenandoahReferenceProcessor::collect_statistics() {
  Counters encountered = {};
  Counters discovered = {};
  Counters enqueued = {};
  uint max_workers = ShenandoahHeap::heap()->max_workers();
  for (uint i = 0; i < max_workers; i++) {
    for (size_t type = 0; type < reference_type_count; type++) {
      encountered[type] += _ref_proc_thread_locals[i].encountered((ReferenceType)type);
      discovered[type] += _ref_proc_thread_locals[i].discovered((ReferenceType)type);
      enqueued[type] += _ref_proc_thread_locals[i].enqueued((ReferenceType)type);
    }
  }

  _stats = ReferenceProcessorStats(discovered[REF_SOFT],
                                   discovered[REF_WEAK],
                                   discovered[REF_FINAL],
                                   discovered[REF_PHANTOM]);

  log_info(gc, ref)("Encountered references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
                    encountered[REF_SOFT], encountered[REF_WEAK], encountered[REF_FINAL], encountered[REF_PHANTOM]);
  log_info(gc, ref)("Discovered references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
                    discovered[REF_SOFT], discovered[REF_WEAK], discovered[REF_FINAL], discovered[REF_PHANTOM]);
  log_info(gc, ref)("Enqueued references: Soft: " SIZE_FORMAT ", Weak: " SIZE_FORMAT ", Final: " SIZE_FORMAT ", Phantom: " SIZE_FORMAT,
                    enqueued[REF_SOFT], enqueued[REF_WEAK], enqueued[REF_FINAL], enqueued[REF_PHANTOM]);
}