< prev index next >

src/hotspot/share/gc/shenandoah/shenandoahReferenceProcessor.cpp

Print this page

  1 /*
  2  * Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.

  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "precompiled.hpp"
 27 #include "classfile/javaClasses.hpp"
 28 #include "gc/shared/workerThread.hpp"

 29 #include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
 30 #include "gc/shenandoah/shenandoahReferenceProcessor.hpp"

 31 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
 32 #include "gc/shenandoah/shenandoahUtils.hpp"
 33 #include "runtime/atomic.hpp"
 34 #include "logging/log.hpp"
 35 
 36 static ReferenceType reference_type(oop reference) {
 37   return InstanceKlass::cast(reference->klass())->reference_type();
 38 }
 39 
 40 static const char* reference_type_name(ReferenceType type) {
 41   switch (type) {
 42     case REF_SOFT:
 43       return "Soft";
 44 
 45     case REF_WEAK:
 46       return "Weak";
 47 
 48     case REF_FINAL:
 49       return "Final";
 50 
 51     case REF_PHANTOM:
 52       return "Phantom";
 53 
 54     default:
 55       ShouldNotReachHere();
 56       return nullptr;
 57   }
 58 }
 59 

















// Store an oop into a (possibly compressed) reference field. Specialized for
// both the uncompressed (oop) and compressed (narrowOop) heap layouts.
template <typename T>
static void set_oop_field(T* field, oop value);

template <>
void set_oop_field<oop>(oop* field, oop value) {
  // Uncompressed layout: plain store.
  *field = value;
}
 67 
template <>
void set_oop_field<narrowOop>(narrowOop* field, oop value) {
  // Compressed layout: encode the oop before storing.
  *field = CompressedOops::encode(value);
}
 72 
 73 static oop lrb(oop obj) {
 74   if (obj != nullptr && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
 75     return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
 76   } else {
 77     return obj;
 78   }
 79 }
 80 
// Address of the referent field of a java.lang.ref.Reference, typed volatile
// for atomic access.
template <typename T>
static volatile T* reference_referent_addr(oop reference) {
  return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
}
 85 
 86 template <typename T>
 87 static oop reference_referent(oop reference) {
 88   T heap_oop = Atomic::load(reference_referent_addr<T>(reference));
 89   return CompressedOops::decode(heap_oop);
 90 }

240 }
241 
242 bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
243   if (type != REF_SOFT) {
244     // Not a SoftReference
245     return false;
246   }
247 
248   // Ask SoftReference policy
249   const jlong clock = java_lang_ref_SoftReference::clock();
250   assert(clock != 0, "Clock not initialized");
251   assert(_soft_reference_policy != nullptr, "Policy not initialized");
252   return !_soft_reference_policy->should_clear_reference(reference, clock);
253 }
254 
255 template <typename T>
256 bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
257   T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
258   T heap_oop = RawAccess<>::oop_load(referent_addr);
259   oop referent = CompressedOops::decode(heap_oop);

260 
261   if (is_inactive<T>(reference, referent, type)) {
262     log_trace(gc,ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
263     return false;
264   }
265 
266   if (is_strongly_live(referent)) {
267     log_trace(gc,ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
268     return false;
269   }
270 
271   if (is_softly_live(reference, type)) {
272     log_trace(gc,ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
273     return false;
274   }
275 





276   return true;
277 }
278 
279 template <typename T>
280 bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
281   const oop referent = reference_referent<T>(reference);
282   if (referent == nullptr) {
283     // Reference has been cleared, by a call to Reference.enqueue()
284     // or Reference.clear() from the application, which means we
285     // should drop the reference.
286     return true;
287   }
288 
289   // Check if the referent is still alive, in which case we should
290   // drop the reference.
291   if (type == REF_PHANTOM) {
292     return ShenandoahHeap::heap()->complete_marking_context()->is_marked(referent);
293   } else {
294     return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(referent);
295   }

321   if (reference_discovered<T>(reference) != nullptr) {
322     // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
323     // in which case it will be seen 2x by marking.
324     log_trace(gc,ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
325     return true;
326   }
327 
328   if (type == REF_FINAL) {
329     ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
330     bool weak = cl->is_weak();
331     cl->set_weak(true);
332     if (UseCompressedOops) {
333       cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
334     } else {
335       cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
336     }
337     cl->set_weak(weak);
338   }
339 
340   // Add reference to discovered list



341   ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
342   oop discovered_head = refproc_data.discovered_list_head<T>();
343   if (discovered_head == nullptr) {
344     // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
345     // discovered field: if it is null, then it is not-yet discovered, otherwise it is discovered
346     discovered_head = reference;
347   }
348   if (reference_cas_discovered<T>(reference, discovered_head)) {












349     refproc_data.set_discovered_list_head<T>(reference);
350     assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
351     log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
352     _ref_proc_thread_locals[worker_id].inc_discovered(type);
353   }
354   return true;
355 }
356 
357 bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
358   if (!RegisterReferences) {
359     // Reference processing disabled
360     return false;
361   }
362 
363   log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

364   uint worker_id = WorkerThread::worker_id();
365   _ref_proc_thread_locals[worker_id].inc_encountered(type);
366 
367   if (UseCompressedOops) {
368     return discover<narrowOop>(reference, type, worker_id);
369   } else {
370     return discover<oop>(reference, type, worker_id);
371   }
372 }
373 
// Drop a reference from the discovered list (its referent is alive or was
// already cleared by the application): unlink it and return its successor.
template <typename T>
oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

#ifdef ASSERT
  oop referent = reference_referent<T>(reference);
  assert(referent == nullptr || ShenandoahHeap::heap()->marking_context()->is_marked(referent),
         "only drop references with alive referents");
#endif

  // Unlink and return next in list
  oop next = reference_discovered<T>(reference);
  reference_set_discovered<T>(reference, nullptr);
  return next;
}
389 
390 template <typename T>
391 T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
392   log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
393 
394   // Update statistics
395   _ref_proc_thread_locals[worker_id].inc_enqueued(type);
396 
397   // Make reference inactive
398   make_inactive<T>(reference, type);
399 
400   // Return next in list
401   return reference_discovered_addr<T>(reference);
402 }
403 
404 template <typename T>
405 void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {
406   log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));

418       break;
419     }
420     log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
421     const ReferenceType type = reference_type(reference);
422 
423     if (should_drop<T>(reference, type)) {
424       set_oop_field(p, drop<T>(reference, type));
425     } else {
426       p = keep<T>(reference, type, worker_id);
427     }
428 
429     const oop discovered = lrb(reference_discovered<T>(reference));
430     if (reference == discovered) {
431       // Reset terminating self-loop to null
432       reference_set_discovered<T>(reference, oop(nullptr));
433       break;
434     }
435   }
436 
437   // Prepend discovered references to internal pending list

438   if (!CompressedOops::is_null(*list)) {
439     oop head = lrb(CompressedOops::decode_not_null(*list));
440     shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
441     oop prev = Atomic::xchg(&_pending_list, head);
442     RawAccess<>::oop_store(p, prev);
443     if (prev == nullptr) {
444       // First to prepend to list, record tail
445       _pending_list_tail = reinterpret_cast<void*>(p);
446     }
447 
448     // Clear discovered list
449     set_oop_field(list, oop(nullptr));
450   }
451 }
452 
453 void ShenandoahReferenceProcessor::work() {
454   // Process discovered references
455   uint max_workers = ShenandoahHeap::heap()->max_workers();
456   uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
457   while (worker_id < max_workers) {
458     if (UseCompressedOops) {
459       process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
460     } else {
461       process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
462     }

// Process all discovered reference lists with the given workers, update the
// SoftReference clock, collect statistics, and hand surviving references over
// to the Java pending list.
void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkerThreads* workers, bool concurrent) {

  // Reset the claim counter used by work() to hand out per-worker lists
  Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);

  // Process discovered lists
  ShenandoahReferenceProcessorTask task(phase, concurrent, this);
  workers->run_task(&task);

  // Update SoftReference clock
  soft_reference_update_clock();

  // Collect, log and trace statistics
  collect_statistics();

  enqueue_references(concurrent);
}
510 
// Prepend the internal pending list to the external (Universe) pending list.
// _pending_list_tail points at the discovered field of the last Reference on
// the internal list; storing the former global head there splices the two
// lists together. Caller must hold Heap_lock or run inside a VM operation.
void ShenandoahReferenceProcessor::enqueue_references_locked() {
  // Prepend internal pending list to external pending list
  shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
  if (UseCompressedOops) {
    *reinterpret_cast<narrowOop*>(_pending_list_tail) = CompressedOops::encode(Universe::swap_reference_pending_list(_pending_list));
  } else {
    *reinterpret_cast<oop*>(_pending_list_tail) = Universe::swap_reference_pending_list(_pending_list);
  }
}
520 
521 void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
522   if (_pending_list == nullptr) {
523     // Nothing to enqueue
524     return;
525   }
526 
527   if (!concurrent) {
528     // When called from mark-compact or degen-GC, the locking is done by the VMOperation,
529     enqueue_references_locked();
530   } else {
531     // Heap_lock protects external pending list
532     MonitorLocker ml(Heap_lock);
533 
534     enqueue_references_locked();
535 
536     // Notify ReferenceHandler thread
537     ml.notify_all();
538   }
539 
540   // Reset internal pending list
541   _pending_list = nullptr;
542   _pending_list_tail = &_pending_list;
543 }
544 
545 template<typename T>
546 void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {

  1 /*
  2  * Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2020, 2021, Red Hat, Inc. and/or its affiliates.
  4  * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
  5  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  6  *
  7  * This code is free software; you can redistribute it and/or modify it
  8  * under the terms of the GNU General Public License version 2 only, as
  9  * published by the Free Software Foundation.
 10  *
 11  * This code is distributed in the hope that it will be useful, but WITHOUT
 12  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 13  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 14  * version 2 for more details (a copy is included in the LICENSE file that
 15  * accompanied this code).
 16  *
 17  * You should have received a copy of the GNU General Public License version
 18  * 2 along with this work; if not, write to the Free Software Foundation,
 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  *
 25  */
 26 
 27 #include "precompiled.hpp"
 28 #include "classfile/javaClasses.hpp"
 29 #include "gc/shared/workerThread.hpp"
 30 #include "gc/shenandoah/shenandoahGeneration.hpp"
 31 #include "gc/shenandoah/shenandoahOopClosures.inline.hpp"
 32 #include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
 33 #include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
 34 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
 35 #include "gc/shenandoah/shenandoahUtils.hpp"
 36 #include "runtime/atomic.hpp"
 37 #include "logging/log.hpp"
 38 
 39 static ReferenceType reference_type(oop reference) {
 40   return InstanceKlass::cast(reference->klass())->reference_type();
 41 }
 42 
 43 static const char* reference_type_name(ReferenceType type) {
 44   switch (type) {
 45     case REF_SOFT:
 46       return "Soft";
 47 
 48     case REF_WEAK:
 49       return "Weak";
 50 
 51     case REF_FINAL:
 52       return "Final";
 53 
 54     case REF_PHANTOM:
 55       return "Phantom";
 56 
 57     default:
 58       ShouldNotReachHere();
 59       return nullptr;
 60   }
 61 }
 62 
 63 template <typename T>
 64 static void card_mark_barrier(T* field, oop value) {
 65   ShenandoahHeap* heap = ShenandoahHeap::heap();
 66   assert(heap->is_in_or_null(value), "Should be in heap");
 67   assert(ShenandoahCardBarrier, "Card-mark barrier should be on");
 68   if (heap->is_in_old(field) && heap->is_in_young(value)) {
 69     // For Shenandoah, each generation collects all the _referents_ that belong to the
 70     // collected generation. We can end up with discovered lists that contain a mixture
 71     // of old and young _references_. These references are linked together through the
 72     // discovered field in java.lang.Reference. In some cases, creating or editing this
 73     // list may result in the creation of _new_ old-to-young pointers which must dirty
 74     // the corresponding card. Failing to do this may cause heap verification errors and
 75     // lead to incorrect GC behavior.
 76     heap->card_scan()->mark_card_as_dirty(reinterpret_cast<HeapWord*>(field));
 77   }
 78 }
 79 
// Store an oop into a (possibly compressed) reference field, applying the
// generational card-mark barrier when enabled. Specialized for both the
// uncompressed (oop) and compressed (narrowOop) heap layouts.
template <typename T>
static void set_oop_field(T* field, oop value);

template <>
void set_oop_field<oop>(oop* field, oop value) {
  *field = value;
  if (ShenandoahCardBarrier) {
    // Dirty the card if this store created an old-to-young pointer.
    card_mark_barrier(field, value);
  }
}

template <>
void set_oop_field<narrowOop>(narrowOop* field, oop value) {
  // Compressed layout: encode the oop before storing.
  *field = CompressedOops::encode(value);
  if (ShenandoahCardBarrier) {
    card_mark_barrier(field, value);
  }
}
 98 
 99 static oop lrb(oop obj) {
100   if (obj != nullptr && ShenandoahHeap::heap()->marking_context()->is_marked(obj)) {
101     return ShenandoahBarrierSet::barrier_set()->load_reference_barrier(obj);
102   } else {
103     return obj;
104   }
105 }
106 
// Address of the referent field of a java.lang.ref.Reference, typed volatile
// for atomic access.
template <typename T>
static volatile T* reference_referent_addr(oop reference) {
  return (volatile T*)java_lang_ref_Reference::referent_addr_raw(reference);
}
111 
112 template <typename T>
113 static oop reference_referent(oop reference) {
114   T heap_oop = Atomic::load(reference_referent_addr<T>(reference));
115   return CompressedOops::decode(heap_oop);
116 }

266 }
267 
268 bool ShenandoahReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
269   if (type != REF_SOFT) {
270     // Not a SoftReference
271     return false;
272   }
273 
274   // Ask SoftReference policy
275   const jlong clock = java_lang_ref_SoftReference::clock();
276   assert(clock != 0, "Clock not initialized");
277   assert(_soft_reference_policy != nullptr, "Policy not initialized");
278   return !_soft_reference_policy->should_clear_reference(reference, clock);
279 }
280 
281 template <typename T>
282 bool ShenandoahReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
283   T* referent_addr = (T*) java_lang_ref_Reference::referent_addr_raw(reference);
284   T heap_oop = RawAccess<>::oop_load(referent_addr);
285   oop referent = CompressedOops::decode(heap_oop);
286   ShenandoahHeap* heap = ShenandoahHeap::heap();
287 
288   if (is_inactive<T>(reference, referent, type)) {
289     log_trace(gc,ref)("Reference inactive: " PTR_FORMAT, p2i(reference));
290     return false;
291   }
292 
293   if (is_strongly_live(referent)) {
294     log_trace(gc,ref)("Reference strongly live: " PTR_FORMAT, p2i(reference));
295     return false;
296   }
297 
298   if (is_softly_live(reference, type)) {
299     log_trace(gc,ref)("Reference softly live: " PTR_FORMAT, p2i(reference));
300     return false;
301   }
302 
303   if (!heap->is_in_active_generation(referent)) {
304     log_trace(gc,ref)("Referent outside of active generation: " PTR_FORMAT, p2i(referent));
305     return false;
306   }
307 
308   return true;
309 }
310 
311 template <typename T>
312 bool ShenandoahReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
313   const oop referent = reference_referent<T>(reference);
314   if (referent == nullptr) {
315     // Reference has been cleared, by a call to Reference.enqueue()
316     // or Reference.clear() from the application, which means we
317     // should drop the reference.
318     return true;
319   }
320 
321   // Check if the referent is still alive, in which case we should
322   // drop the reference.
323   if (type == REF_PHANTOM) {
324     return ShenandoahHeap::heap()->complete_marking_context()->is_marked(referent);
325   } else {
326     return ShenandoahHeap::heap()->complete_marking_context()->is_marked_strong(referent);
327   }

353   if (reference_discovered<T>(reference) != nullptr) {
354     // Already discovered. This can happen if the reference is marked finalizable first, and then strong,
355     // in which case it will be seen 2x by marking.
356     log_trace(gc,ref)("Reference already discovered: " PTR_FORMAT, p2i(reference));
357     return true;
358   }
359 
360   if (type == REF_FINAL) {
361     ShenandoahMarkRefsSuperClosure* cl = _ref_proc_thread_locals[worker_id].mark_closure();
362     bool weak = cl->is_weak();
363     cl->set_weak(true);
364     if (UseCompressedOops) {
365       cl->do_oop(reinterpret_cast<narrowOop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
366     } else {
367       cl->do_oop(reinterpret_cast<oop*>(java_lang_ref_Reference::referent_addr_raw(reference)));
368     }
369     cl->set_weak(weak);
370   }
371 
372   // Add reference to discovered list
373   // Each worker thread has a private copy of refproc_data, which includes a private discovered list.  This means
374   // there's no risk that a different worker thread will try to manipulate my discovered list head while I'm making
375   // reference the head of my discovered list.
376   ShenandoahRefProcThreadLocal& refproc_data = _ref_proc_thread_locals[worker_id];
377   oop discovered_head = refproc_data.discovered_list_head<T>();
378   if (discovered_head == nullptr) {
379     // Self-loop tail of list. We distinguish discovered from not-discovered references by looking at their
380     // discovered field: if it is null, then it is not-yet discovered, otherwise it is discovered
381     discovered_head = reference;
382   }
383   if (reference_cas_discovered<T>(reference, discovered_head)) {
384     // We successfully set this reference object's next pointer to discovered_head.  This marks reference as discovered.
385     // If reference_cas_discovered fails, that means some other worker thread took credit for discovery of this reference,
386     // and that other thread will place reference on its discovered list, so I can ignore reference.
387 
388     // In case we have created an interesting pointer, mark the remembered set card as dirty.
389     ShenandoahHeap* heap = ShenandoahHeap::heap();
390     if (ShenandoahCardBarrier) {
391       T* addr = reinterpret_cast<T*>(java_lang_ref_Reference::discovered_addr_raw(reference));
392       card_mark_barrier(addr, discovered_head);
393     }
394 
395     // Make the discovered_list_head point to reference.
396     refproc_data.set_discovered_list_head<T>(reference);
397     assert(refproc_data.discovered_list_head<T>() == reference, "reference must be new discovered head");
398     log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
399     _ref_proc_thread_locals[worker_id].inc_discovered(type);
400   }
401   return true;
402 }
403 
404 bool ShenandoahReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
405   if (!RegisterReferences) {
406     // Reference processing disabled
407     return false;
408   }
409 
410   log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s, %s)",
411           p2i(reference), reference_type_name(type), ShenandoahHeap::heap()->heap_region_containing(reference)->affiliation_name());
412   uint worker_id = WorkerThread::worker_id();
413   _ref_proc_thread_locals[worker_id].inc_encountered(type);
414 
415   if (UseCompressedOops) {
416     return discover<narrowOop>(reference, type, worker_id);
417   } else {
418     return discover<oop>(reference, type, worker_id);
419   }
420 }
421 
// Drop a reference from the discovered list (its referent is alive or was
// already cleared by the application): unlink it and return its successor.
template <typename T>
oop ShenandoahReferenceProcessor::drop(oop reference, ReferenceType type) {
  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));

  ShenandoahHeap* heap = ShenandoahHeap::heap();
  oop referent = reference_referent<T>(reference);
  assert(referent == nullptr || heap->marking_context()->is_marked(referent), "only drop references with alive referents");

  // Unlink and return next in list
  oop next = reference_discovered<T>(reference);
  reference_set_discovered<T>(reference, nullptr);
  // When this reference was discovered, it would not have been marked. If it ends up surviving
  // the cycle, we need to dirty the card if the reference is old and the referent is young.  Note
  // that if the reference is not dropped, then its pointer to the referent will be nulled before
  // evacuation begins so card does not need to be dirtied.
  if (heap->mode()->is_generational() && heap->is_in_old(reference) && heap->is_in_young(referent)) {
    // Note: would be sufficient to mark only the card that holds the start of this Reference object.
    heap->card_scan()->mark_range_as_dirty(cast_from_oop<HeapWord*>(reference), reference->size());
  }
  return next;
}
443 
444 template <typename T>
445 T* ShenandoahReferenceProcessor::keep(oop reference, ReferenceType type, uint worker_id) {
446   log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
447 
448   // Update statistics
449   _ref_proc_thread_locals[worker_id].inc_enqueued(type);
450 
451   // Make reference inactive
452   make_inactive<T>(reference, type);
453 
454   // Return next in list
455   return reference_discovered_addr<T>(reference);
456 }
457 
458 template <typename T>
459 void ShenandoahReferenceProcessor::process_references(ShenandoahRefProcThreadLocal& refproc_data, uint worker_id) {
460   log_trace(gc, ref)("Processing discovered list #%u : " PTR_FORMAT, worker_id, p2i(refproc_data.discovered_list_head<T>()));

472       break;
473     }
474     log_trace(gc, ref)("Processing reference: " PTR_FORMAT, p2i(reference));
475     const ReferenceType type = reference_type(reference);
476 
477     if (should_drop<T>(reference, type)) {
478       set_oop_field(p, drop<T>(reference, type));
479     } else {
480       p = keep<T>(reference, type, worker_id);
481     }
482 
483     const oop discovered = lrb(reference_discovered<T>(reference));
484     if (reference == discovered) {
485       // Reset terminating self-loop to null
486       reference_set_discovered<T>(reference, oop(nullptr));
487       break;
488     }
489   }
490 
491   // Prepend discovered references to internal pending list
492   // set_oop_field maintains the card mark barrier as this list is constructed.
493   if (!CompressedOops::is_null(*list)) {
494     oop head = lrb(CompressedOops::decode_not_null(*list));
495     shenandoah_assert_not_in_cset_except(&head, head, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);
496     oop prev = Atomic::xchg(&_pending_list, head);
497     set_oop_field(p, prev);
498     if (prev == nullptr) {
499       // First to prepend to list, record tail
500       _pending_list_tail = reinterpret_cast<void*>(p);
501     }
502 
503     // Clear discovered list
504     set_oop_field(list, oop(nullptr));
505   }
506 }
507 
508 void ShenandoahReferenceProcessor::work() {
509   // Process discovered references
510   uint max_workers = ShenandoahHeap::heap()->max_workers();
511   uint worker_id = Atomic::add(&_iterate_discovered_list_id, 1U, memory_order_relaxed) - 1;
512   while (worker_id < max_workers) {
513     if (UseCompressedOops) {
514       process_references<narrowOop>(_ref_proc_thread_locals[worker_id], worker_id);
515     } else {
516       process_references<oop>(_ref_proc_thread_locals[worker_id], worker_id);
517     }

// Process all discovered reference lists with the given workers, update the
// SoftReference clock, collect statistics, and hand surviving references over
// to the Java pending list.
void ShenandoahReferenceProcessor::process_references(ShenandoahPhaseTimings::Phase phase, WorkerThreads* workers, bool concurrent) {

  // Reset the claim counter used by work() to hand out per-worker lists
  Atomic::release_store_fence(&_iterate_discovered_list_id, 0U);

  // Process discovered lists
  ShenandoahReferenceProcessorTask task(phase, concurrent, this);
  workers->run_task(&task);

  // Update SoftReference clock
  soft_reference_update_clock();

  // Collect, log and trace statistics
  collect_statistics();

  enqueue_references(concurrent);
}
565 
// Splice the internal pending list onto the global Universe pending list.
// Caller must hold Heap_lock or run inside a VM operation.
void ShenandoahReferenceProcessor::enqueue_references_locked() {
  // Prepend internal pending list to external pending list
  shenandoah_assert_not_in_cset_except(&_pending_list, _pending_list, ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahLoadRefBarrier);

  // During reference processing, we maintain a local list of references that are identified by
  //   _pending_list and _pending_list_tail.  _pending_list_tail points to the next field of the last Reference object on
  //   the local list.
  //
  // There is also a global list of reference identified by Universe::_reference_pending_list

  // The following code has the effect of:
  //  1. Making the global Universe::_reference_pending_list point to my local list
  //  2. Overwriting the next field of the last Reference on my local list to point at the previous head of the
  //     global Universe::_reference_pending_list

  oop former_head_of_global_list = Universe::swap_reference_pending_list(_pending_list);
  if (UseCompressedOops) {
    // set_oop_field applies the card-mark barrier for the spliced link.
    set_oop_field<narrowOop>(reinterpret_cast<narrowOop*>(_pending_list_tail), former_head_of_global_list);
  } else {
    set_oop_field<oop>(reinterpret_cast<oop*>(_pending_list_tail), former_head_of_global_list);
  }
}
588 
589 void ShenandoahReferenceProcessor::enqueue_references(bool concurrent) {
590   if (_pending_list == nullptr) {
591     // Nothing to enqueue
592     return;
593   }

594   if (!concurrent) {
595     // When called from mark-compact or degen-GC, the locking is done by the VMOperation,
596     enqueue_references_locked();
597   } else {
598     // Heap_lock protects external pending list
599     MonitorLocker ml(Heap_lock);
600 
601     enqueue_references_locked();
602 
603     // Notify ReferenceHandler thread
604     ml.notify_all();
605   }
606 
607   // Reset internal pending list
608   _pending_list = nullptr;
609   _pending_list_tail = &_pending_list;
610 }
611 
612 template<typename T>
613 void ShenandoahReferenceProcessor::clean_discovered_list(T* list) {
< prev index next >