/*
 * Copyright (c) 2015, 2022, Red Hat, Inc. All rights reserved.
 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
#define SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP

#include "gc/shenandoah/shenandoahBarrierSet.hpp"

#include "gc/shared/accessBarrierSupport.inline.hpp"
#include "gc/shared/cardTable.hpp"
#include "gc/shenandoah/mode/shenandoahMode.hpp"
#include "gc/shenandoah/shenandoahAsserts.hpp"
#include "gc/shenandoah/shenandoahCardTable.hpp"
#include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
#include "gc/shenandoah/shenandoahEvacOOMHandler.inline.hpp"
#include "gc/shenandoah/shenandoahForwarding.inline.hpp"
#include "gc/shenandoah/shenandoahGeneration.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.hpp"
#include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
#include "gc/shenandoah/shenandoahThreadLocalData.hpp"
#include "memory/iterator.inline.hpp"
#include "oops/oop.inline.hpp"

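// Forwarding resolution: return the to-space copy of p if it has already been
// evacuated, otherwise return p itself. The not_null variant skips the null check.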
inline oop ShenandoahBarrierSet::resolve_forwarded_not_null(oop p) {
  return ShenandoahForwarding::get_forwardee(p);
}

inline oop ShenandoahBarrierSet::resolve_forwarded(oop p) {
  if (p != nullptr) {
    return resolve_forwarded_not_null(p);
  } else {
    return p;
  }
}

inline oop ShenandoahBarrierSet::resolve_forwarded_not_null_mutator(oop p) {
  return ShenandoahForwarding::get_forwardee_mutator(p);
}

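// Load-reference barrier slow path taken by mutator threads: the loaded object is
// known to be in the collection set, so evacuate it if nobody has done so yet, and
// heal the load address with the to-space copy.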
template <class T>
inline oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, T* load_addr) {
  assert(ShenandoahLoadRefBarrier, "should be enabled");
  shenandoah_assert_in_cset(load_addr, obj);

  oop fwd = resolve_forwarded_not_null_mutator(obj);
  if (obj == fwd) {
    assert(_heap->is_evacuation_in_progress(),
           "evac should be in progress");
    Thread* const t = Thread::current();
    ShenandoahEvacOOMScope scope(t);
    fwd = _heap->evacuate_object(obj, t);
  }

  if (load_addr != nullptr && fwd != obj) {
    // Since we are here and we know the load address, update the reference.
    ShenandoahHeap::atomic_update_oop(fwd, load_addr, obj);
  }

  return fwd;
}

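// Generic load-reference barrier: if the object is in the collection set, return
// (and, if necessary, create) its to-space copy; otherwise return the object as is.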
inline oop ShenandoahBarrierSet::load_reference_barrier(oop obj) {
  if (!ShenandoahLoadRefBarrier) {
    return obj;
  }
  if (_heap->has_forwarded_objects() &&
      _heap->in_collection_set(obj)) { // Subsumes null-check
    assert(obj != nullptr, "cset check must have subsumed null-check");
    oop fwd = resolve_forwarded_not_null(obj);
    if (obj == fwd && _heap->is_evacuation_in_progress()) {
      Thread* t = Thread::current();
      ShenandoahEvacOOMScope oom_evac_scope(t);
      return _heap->evacuate_object(obj, t);
    }
    return fwd;
  }
  return obj;
}

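// Decorated variant: additionally prevents resurrection of unreachable referents on
// weak/phantom/no-keepalive loads, and heals the load address when one is given.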
template <class T>
inline oop ShenandoahBarrierSet::load_reference_barrier(DecoratorSet decorators, oop obj, T* load_addr) {
  if (obj == nullptr) {
    return nullptr;
  }

  // Prevent resurrection of unreachable phantom (i.e. weak-native) references.
  if ((decorators & ON_PHANTOM_OOP_REF) != 0 &&
      _heap->is_concurrent_weak_root_in_progress() &&
      _heap->is_in_active_generation(obj) &&
      !_heap->marking_context()->is_marked(obj)) {
    return nullptr;
  }

  // Prevent resurrection of unreachable weak references.
  if ((decorators & ON_WEAK_OOP_REF) != 0 &&
      _heap->is_concurrent_weak_root_in_progress() &&
      _heap->is_in_active_generation(obj) &&
      !_heap->marking_context()->is_marked_strong(obj)) {
    return nullptr;
  }

  // Prevent resurrection of unreachable objects that are visited during
  // concurrent class-unloading.
  if ((decorators & AS_NO_KEEPALIVE) != 0 &&
      _heap->is_evacuation_in_progress() &&
      !_heap->marking_context()->is_marked(obj)) {
    return obj;
  }

  oop fwd = load_reference_barrier(obj);
  if (load_addr != nullptr && fwd != obj) {
    // Since we are here and we know the load address, update the reference.
    ShenandoahHeap::atomic_update_oop(fwd, load_addr, obj);
  }

  return fwd;
}

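// Record obj in the current thread's SATB buffer so that concurrent marking treats
// the overwritten value as live (snapshot-at-the-beginning invariant).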
inline void ShenandoahBarrierSet::enqueue(oop obj) {
  assert(obj != nullptr, "checked by caller");
  assert(_satb_mark_queue_set.is_active(), "only get here when SATB active");

  // Filter marked objects before hitting the SATB queues. The same predicate would
  // be used by SATBMQ::filter to eliminate already marked objects downstream, but
  // filtering here helps to avoid wasteful SATB queueing work to begin with.
  if (!_heap->requires_marking(obj)) return;

  SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(Thread::current());
  _satb_mark_queue_set.enqueue_known_active(queue, obj);
}

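// SATB pre-write barrier: before a store overwrites *field during concurrent marking,
// enqueue the previous value. Skipped for uninitialized destinations and for
// no-keepalive accesses.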
template <DecoratorSet decorators, typename T>
inline void ShenandoahBarrierSet::satb_barrier(T *field) {
  if (HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value ||
      HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
    return;
  }
  if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
    T heap_oop = RawAccess<>::oop_load(field);
    if (!CompressedOops::is_null(heap_oop)) {
      enqueue(CompressedOops::decode(heap_oop));
    }
  }
}

inline void ShenandoahBarrierSet::satb_enqueue(oop value) {
  if (value != nullptr && ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
    enqueue(value);
  }
}

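// Incremental-update barrier: during concurrent marking, enqueue the new value that
// is about to be stored so the updated reference is traced.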
inline void ShenandoahBarrierSet::iu_barrier(oop obj) {
  if (ShenandoahIUBarrier && obj != nullptr && _heap->is_concurrent_mark_in_progress()) {
    enqueue(obj);
  }
}

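// Loads through non-strong references must keep the referent alive under SATB,
// unless the access is a peek (AS_NO_KEEPALIVE).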
inline void ShenandoahBarrierSet::keep_alive_if_weak(DecoratorSet decorators, oop value) {
  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
  const bool on_strong_oop_ref = (decorators & ON_STRONG_OOP_REF) != 0;
  const bool peek              = (decorators & AS_NO_KEEPALIVE) != 0;
  if (!peek && !on_strong_oop_ref) {
    satb_enqueue(value);
  }
}

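// Card-table post-write barrier (generational mode): dirty the card covering the
// updated field so the remembered-set scan picks it up.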
template <DecoratorSet decorators, typename T>
inline void ShenandoahBarrierSet::write_ref_field_post(T* field) {
  assert(ShenandoahCardBarrier, "Should have been checked by caller");
  volatile CardTable::CardValue* byte = card_table()->byte_for(field);
  *byte = CardTable::dirty_card_val();
}

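// Common load path: raw load, then load-reference barrier, then keep-alive for
// non-strong references.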
template <typename T>
inline oop ShenandoahBarrierSet::oop_load(DecoratorSet decorators, T* addr) {
  oop value = RawAccess<>::oop_load(addr);
  value = load_reference_barrier(decorators, value, addr);
  keep_alive_if_weak(decorators, value);
  return value;
}

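// CAS on an oop field. The CAS can fail spuriously if the field holds a different
// copy (from-space vs. to-space) of the logically expected object; in that case the
// loop retries with the witnessed copy as the compare value. A failure against a
// genuinely different object terminates the loop.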
template <typename T>
inline oop ShenandoahBarrierSet::oop_cmpxchg(DecoratorSet decorators, T* addr, oop compare_value, oop new_value) {
  iu_barrier(new_value);
  oop res;
  oop expected = compare_value;
  do {
    compare_value = expected;
    res = RawAccess<>::oop_atomic_cmpxchg(addr, compare_value, new_value);
    expected = res;
  } while ((compare_value != expected) && (resolve_forwarded(compare_value) == resolve_forwarded(expected)));

  // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,
  // because it must be the previous value.
  res = load_reference_barrier(decorators, res, static_cast<T*>(nullptr));
  satb_enqueue(res);
  return res;
}

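// Atomic exchange on an oop field: apply the IU barrier to the new value, swap, and
// run the previous value through the load-reference barrier and SATB enqueue.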
template <typename T>
inline oop ShenandoahBarrierSet::oop_xchg(DecoratorSet decorators, T* addr, oop new_value) {
  iu_barrier(new_value);
  oop previous = RawAccess<>::oop_atomic_xchg(addr, new_value);
  // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,
  // because it must be the previous value.
  previous = load_reference_barrier<T>(decorators, previous, static_cast<T*>(nullptr));
  satb_enqueue(previous);
  return previous;
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(T* addr) {
  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "must be absent");
  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
  return bs->oop_load(decorators, addr);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap(T* addr) {
  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "must be absent");
  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
  return bs->oop_load(decorators, addr);
}

template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap_at(oop base, ptrdiff_t offset) {
  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
  DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
  return bs->oop_load(resolved_decorators, AccessInternal::oop_field_addr<decorators>(base, offset));
}

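// Common store path: apply the IU barrier to the new value and the SATB pre-write
// barrier to the destination, then perform the raw store.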
template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_common(T* addr, oop value) {
  shenandoah_assert_marked_if(nullptr, value,
                              !CompressedOops::is_null(value) && ShenandoahHeap::heap()->is_evacuation_in_progress()
                              && !(ShenandoahHeap::heap()->active_generation()->is_young()
                                   && ShenandoahHeap::heap()->heap_region_containing(value)->is_old()));
  shenandoah_assert_not_in_cset_if(addr, value, value != nullptr && !ShenandoahHeap::heap()->cancelled_gc());
  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
  bs->iu_barrier(value);
  bs->satb_barrier<decorators>(addr);
  Raw::oop_store(addr, value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_not_in_heap(T* addr, oop value) {
  oop_store_common(addr, value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap(T* addr, oop value) {
  shenandoah_assert_not_in_cset_loc_except(addr, ShenandoahHeap::heap()->cancelled_gc());
  shenandoah_assert_not_forwarded_except  (addr, value, value == nullptr || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());

  oop_store_common(addr, value);
  if (ShenandoahCardBarrier) {
    ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
    bs->write_ref_field_post<decorators>(addr);
  }
}

template <DecoratorSet decorators, typename BarrierSetT>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap_at(oop base, ptrdiff_t offset, oop value) {
  oop_store_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_not_in_heap(T* addr, oop compare_value, oop new_value) {
  assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  return bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap(T* addr, oop compare_value, oop new_value) {
  assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  oop result = bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
  if (ShenandoahCardBarrier) {
    bs->write_ref_field_post<decorators>(addr);
  }
  return result;
}

template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap_at(oop base, ptrdiff_t offset, oop compare_value, oop new_value) {
  assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
  auto addr = AccessInternal::oop_field_addr<decorators>(base, offset);
  oop result = bs->oop_cmpxchg(resolved_decorators, addr, compare_value, new_value);
  if (ShenandoahCardBarrier) {
    bs->write_ref_field_post<decorators>(addr);
  }
  return result;
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_not_in_heap(T* addr, oop new_value) {
  assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  return bs->oop_xchg(decorators, addr, new_value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap(T* addr, oop new_value) {
  assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  oop result = bs->oop_xchg(decorators, addr, new_value);
  if (ShenandoahCardBarrier) {
    bs->write_ref_field_post<decorators>(addr);
  }
  return result;
}

template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
  assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
  auto addr = AccessInternal::oop_field_addr<decorators>(base, offset);
  oop result = bs->oop_xchg(resolved_decorators, addr, new_value);
  if (ShenandoahCardBarrier) {
    bs->write_ref_field_post<decorators>(addr);
  }
  return result;
}

// Clone barrier support
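// The clone barrier (clone_barrier_runtime) fixes up the references in src before the
// bulk copy, so the clone does not propagate stale from-space pointers.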
template <DecoratorSet decorators, typename BarrierSetT>
void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {
  if (ShenandoahCloneBarrier) {
    ShenandoahBarrierSet::barrier_set()->clone_barrier_runtime(src);
  }
  Raw::clone(src, dst, size);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
bool ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                                                                         arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                                                                         size_t length) {
  T* src = arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw);
  T* dst = arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw);

  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  bs->arraycopy_barrier(src, dst, length);
  bool result = Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
  if (ShenandoahCardBarrier) {
    bs->write_ref_array((HeapWord*) dst, length);
  }
  return result;
}

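// Bulk barrier over an array slice. Template flags: HAS_FWD - forwarded objects may
// exist and collection-set references must be healed; EVAC - evacuate collection-set
// objects that have not been copied yet; ENQUEUE - enqueue unmarked objects for SATB.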
template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
  // Young cycles are allowed to run when old marking is in progress. When old marking is in progress,
  // this barrier will be called with ENQUEUE=true and HAS_FWD=false, even though the young generation
  // may have forwarded objects. In this case, arraycopy_work is first called with HAS_FWD=true and
  // ENQUEUE=false.
  assert(HAS_FWD == _heap->has_forwarded_objects() || (_heap->gc_state() & ShenandoahHeap::OLD_MARKING) != 0,
         "Forwarded object status is sane");
  // This function cannot be called to handle marking and evacuation at the same time (they operate on
  // different sides of the copy).
  assert((HAS_FWD || EVAC) != ENQUEUE, "Cannot evacuate and mark both sides of copy.");

  Thread* thread = Thread::current();
  SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
  ShenandoahMarkingContext* ctx = _heap->marking_context();
  const ShenandoahCollectionSet* const cset = _heap->collection_set();
  T* end = src + count;
  for (T* elem_ptr = src; elem_ptr < end; elem_ptr++) {
    T o = RawAccess<>::oop_load(elem_ptr);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (HAS_FWD && cset->is_in(obj)) {
        oop fwd = resolve_forwarded_not_null(obj);
        if (EVAC && obj == fwd) {
          fwd = _heap->evacuate_object(obj, thread);
        }
        shenandoah_assert_forwarded_except(elem_ptr, obj, _heap->cancelled_gc());
        ShenandoahHeap::atomic_update_oop(fwd, elem_ptr, o);
      }
      if (ENQUEUE && !ctx->is_marked_strong_or_old(obj)) {
        _satb_mark_queue_set.enqueue_known_active(queue, obj);
      }
    }
  }
}

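// Dispatch the bulk arraycopy barriers according to the current GC state: heal/evacuate
// the source slice during evacuation or update-refs, and apply the marking barrier
// during concurrent marking (young and/or old in generational mode).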
template <class T>
void ShenandoahBarrierSet::arraycopy_barrier(T* src, T* dst, size_t count) {
  if (count == 0) {
    // No elements to copy, no need for barrier
    return;
  }

  int gc_state = _heap->gc_state();
  if ((gc_state & ShenandoahHeap::EVACUATION) != 0) {
    arraycopy_evacuation(src, count);
  } else if ((gc_state & ShenandoahHeap::UPDATEREFS) != 0) {
    arraycopy_update(src, count);
  }

  if (_heap->mode()->is_generational()) {
    assert(ShenandoahSATBBarrier, "Generational mode assumes SATB mode");
    if ((gc_state & ShenandoahHeap::YOUNG_MARKING) != 0) {
      arraycopy_marking(src, dst, count, false);
    }
    if ((gc_state & ShenandoahHeap::OLD_MARKING) != 0) {
      arraycopy_marking(src, dst, count, true);
    }
  } else if ((gc_state & ShenandoahHeap::MARKING) != 0) {
    arraycopy_marking(src, dst, count, false);
  }
}

template <class T>
void ShenandoahBarrierSet::arraycopy_marking(T* src, T* dst, size_t count, bool is_old_marking) {
  assert(_heap->is_concurrent_mark_in_progress(), "only during marking");
  /*
   * Note that an old-gen object is considered live if it is live at the start of OLD marking or if it is promoted
   * following the start of OLD marking.
   *
   * 1. Every object promoted following the start of OLD marking will be above TAMS within its old-gen region
   * 2. Every object live at the start of OLD marking will be referenced from a "root" or it will be referenced from
   *    another live OLD-gen object.  With regard to old-gen, roots include stack locations and all of live young-gen.
   *    All root references to old-gen are identified during a bootstrap young collection.  All references from other
   *    old-gen objects will be marked during the traversal of all old objects, or will be marked by the SATB barrier.
   *
   * During old-gen marking (which is interleaved with young-gen collections), call arraycopy_work() if:
   *
   * 1. The overwritten array resides in old-gen and is below TAMS within its old-gen region
   * 2. Never call arraycopy_work() for an array residing in young-gen, because young-gen collection is idle
   *    at this time
   *
   * During young-gen marking, call arraycopy_work() if:
   *
   * 1. The overwritten array resides in young-gen and is below TAMS within its young-gen region
   * 2. The overwritten array resides in old-gen, regardless of its relationship to TAMS, because an old-gen array
   *    may hold references to young-gen
   */
  if (ShenandoahSATBBarrier) {
    T* array = dst;
    HeapWord* array_addr = reinterpret_cast<HeapWord*>(array);
    ShenandoahHeapRegion* r = _heap->heap_region_containing(array_addr);
    if (is_old_marking) {
      // Generational, old marking
      assert(_heap->mode()->is_generational(), "Invariant");
      if (r->is_old() && (array_addr < _heap->marking_context()->top_at_mark_start(r))) {
        arraycopy_work<T, false, false, true>(array, count);
      }
    } else if (_heap->mode()->is_generational()) {
      // Generational, young marking
      if (r->is_old() || (array_addr < _heap->marking_context()->top_at_mark_start(r))) {
        arraycopy_work<T, false, false, true>(array, count);
      }
    } else if (array_addr < _heap->marking_context()->top_at_mark_start(r)) {
      // Non-generational, marking
      arraycopy_work<T, false, false, true>(array, count);
    }
  } else {
    // Incremental Update mode, marking
    T* array = src;
    HeapWord* array_addr = reinterpret_cast<HeapWord*>(array);
    ShenandoahHeapRegion* r = _heap->heap_region_containing(array_addr);
    if (array_addr < _heap->marking_context()->top_at_mark_start(r)) {
      arraycopy_work<T, false, false, true>(array, count);
    }
  }
}

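// Only arrays below the region's update watermark may still hold from-space references
// and need the bulk fixup; arrays at or above it were allocated with to-space
// references already.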
inline bool ShenandoahBarrierSet::need_bulk_update(HeapWord* ary) {
  return ary < _heap->heap_region_containing(ary)->get_update_watermark();
}

template <class T>
void ShenandoahBarrierSet::arraycopy_evacuation(T* src, size_t count) {
  assert(_heap->is_evacuation_in_progress(), "only during evacuation");
  if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {
    ShenandoahEvacOOMScope oom_evac;
    arraycopy_work<T, true, true, false>(src, count);
  }
}

template <class T>
void ShenandoahBarrierSet::arraycopy_update(T* src, size_t count) {
  assert(_heap->is_update_refs_in_progress(), "only during update-refs");
  if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {
    arraycopy_work<T, true, false, false>(src, count);
  }
}

#endif // SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP