/*
 * Copyright (c) 2015, 2022, Red Hat, Inc. All rights reserved.
 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
#define SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP

#include "gc/shenandoah/shenandoahBarrierSet.hpp"

#include "gc/shared/accessBarrierSupport.inline.hpp"
#include "gc/shared/cardTable.hpp"
#include "gc/shenandoah/mode/shenandoahMode.hpp"
#include "gc/shenandoah/shenandoahAsserts.hpp"
#include "gc/shenandoah/shenandoahCardTable.hpp"
#include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
#include "gc/shenandoah/shenandoahForwarding.inline.hpp"
#include "gc/shenandoah/shenandoahGeneration.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.hpp"
#include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
#include "gc/shenandoah/shenandoahThreadLocalData.hpp"
#include "memory/iterator.inline.hpp"
#include "oops/oop.inline.hpp"

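// Forwarding resolution: return the to-space copy of the object if it has been
// evacuated, or the object itself otherwise. The not-null variant skips the null check.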
inline oop ShenandoahBarrierSet::resolve_forwarded_not_null(oop p) {
  return ShenandoahForwarding::get_forwardee(p);
}

inline oop ShenandoahBarrierSet::resolve_forwarded(oop p) {
  if (p != nullptr) {
    return resolve_forwarded_not_null(p);
  } else {
    return p;
  }
}

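// Load reference barrier used on the mutator slow path. If the object is in the
// collection set and not yet forwarded, the mutator evacuates it here. When the
// load address is known, the healed (to-space) reference is also stored back.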
template <DecoratorSet decorators, class T>
inline oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, T* load_addr) {
  assert(ShenandoahLoadRefBarrier, "Should be enabled");

  constexpr bool on_weak = HasDecorator<decorators, ON_WEAK_OOP_REF>::value;
  constexpr bool on_phantom = HasDecorator<decorators, ON_PHANTOM_OOP_REF>::value;

  // Handle nulls. Strong loads have already filtered out nulls via the cset check;
  // weak/phantom loads need to check for nulls here.
  if (on_weak || on_phantom) {
    if (obj == nullptr) {
      return nullptr;
    }
  } else {
    assert(obj != nullptr, "Should have been filtered before");
  }

  // Prevent resurrection of unreachable phantom (i.e. weak-native) references.
  if (on_phantom &&
      _heap->is_concurrent_weak_root_in_progress() &&
      _heap->is_in_active_generation(obj) &&
      !_heap->marking_context()->is_marked(obj)) {
    return nullptr;
  }

  // Prevent resurrection of unreachable weak references.
  if (on_weak &&
      _heap->is_concurrent_weak_root_in_progress() &&
      _heap->is_in_active_generation(obj) &&
      !_heap->marking_context()->is_marked_strong(obj)) {
    return nullptr;
  }

  // Weak/phantom loads need an additional cset check.
  if (on_phantom || on_weak) {
    if (!_heap->has_forwarded_objects() || !_heap->in_collection_set(obj)) {
      return obj;
    }
  } else {
    shenandoah_assert_in_cset(load_addr, obj);
  }

  oop fwd = ShenandoahForwarding::get_forwardee_mutator(obj);
  if (obj == fwd) {
    assert(_heap->is_evacuation_in_progress(), "evac should be in progress");
    Thread* const t = Thread::current();
    fwd = _heap->evacuate_object(obj, t);
  }

  if (load_addr != nullptr && fwd != obj) {
    // Since we are here and we know the load address, update the reference.
    ShenandoahHeap::atomic_update_oop(fwd, load_addr, obj);
  }

  return fwd;
}

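// Common load reference barrier: only objects that are in the collection set while
// the heap still has forwarded objects need any work; everything else returns unchanged.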
inline oop ShenandoahBarrierSet::load_reference_barrier(oop obj) {
  if (!ShenandoahLoadRefBarrier) {
    return obj;
  }
  if (_heap->has_forwarded_objects() && _heap->in_collection_set(obj)) {
    // Subsumes null-check
    assert(obj != nullptr, "cset check must have subsumed null-check");
    oop fwd = resolve_forwarded_not_null(obj);
    if (obj == fwd && _heap->is_evacuation_in_progress()) {
      Thread* t = Thread::current();
      return _heap->evacuate_object(obj, t);
    }
    return fwd;
  }
  return obj;
}

template <class T>
inline oop ShenandoahBarrierSet::load_reference_barrier(DecoratorSet decorators, oop obj, T* load_addr) {
  if (obj == nullptr) {
    return nullptr;
  }

  // Prevent resurrection of unreachable phantom (i.e. weak-native) references.
  if ((decorators & ON_PHANTOM_OOP_REF) != 0 &&
      _heap->is_concurrent_weak_root_in_progress() &&
      _heap->is_in_active_generation(obj) &&
      !_heap->marking_context()->is_marked(obj)) {
    return nullptr;
  }

  // Prevent resurrection of unreachable weak references.
  if ((decorators & ON_WEAK_OOP_REF) != 0 &&
      _heap->is_concurrent_weak_root_in_progress() &&
      _heap->is_in_active_generation(obj) &&
      !_heap->marking_context()->is_marked_strong(obj)) {
    return nullptr;
  }

  // Allow runtime to see unreachable objects that are visited during concurrent class-unloading.
  if ((decorators & AS_NO_KEEPALIVE) != 0 &&
      _heap->is_concurrent_weak_root_in_progress() &&
      !_heap->marking_context()->is_marked(obj)) {
    return obj;
  }

  oop fwd = load_reference_barrier(obj);
  if (load_addr != nullptr && fwd != obj) {
    // Since we are here and we know the load address, update the reference.
    ShenandoahHeap::atomic_update_oop(fwd, load_addr, obj);
  }

  return fwd;
}

inline void ShenandoahBarrierSet::enqueue(oop obj, bool filter) {
  assert(obj != nullptr, "checked by caller");
  assert(_satb_mark_queue_set.is_active(), "only get here when SATB active");

  // Filter marked objects before hitting the SATB queues. The same predicate would
  // be used by SATBMQ::filter to eliminate already marked objects downstream, but
  // filtering here helps to avoid wasteful SATB queueing work to begin with.
  if (filter && !_heap->requires_marking(obj)) return;

  SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(Thread::current());
  _satb_mark_queue_set.enqueue_known_active(queue, obj);
}

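// SATB pre-write barrier for oop stores: enqueue the previous value of the field
// so that concurrent marking does not lose objects that were reachable at mark start.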
template <DecoratorSet decorators, typename T>
inline void ShenandoahBarrierSet::satb_barrier(T* field) {
  // Uninitialized and no-keepalive stores do not need a barrier.
  if (HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value ||
      HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
    return;
  }

  // Stores to weak/phantom require no barrier. The original references would
  // have been enqueued in the SATB buffer by the load barrier if they were needed.
  if (HasDecorator<decorators, ON_WEAK_OOP_REF>::value ||
      HasDecorator<decorators, ON_PHANTOM_OOP_REF>::value) {
    return;
  }

  if (ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
    T heap_oop = RawAccess<>::oop_load(field);
    if (!CompressedOops::is_null(heap_oop)) {
      enqueue(CompressedOops::decode(heap_oop));
    }
  }
}

inline void ShenandoahBarrierSet::satb_enqueue(oop value) {
  if (value != nullptr && ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
    enqueue(value);
  }
}

inline void ShenandoahBarrierSet::keep_alive_if_weak(DecoratorSet decorators, oop value) {
  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
  const bool on_strong_oop_ref = (decorators & ON_STRONG_OOP_REF) != 0;
  const bool peek = (decorators & AS_NO_KEEPALIVE) != 0;
  if (!peek && !on_strong_oop_ref) {
    satb_enqueue(value);
  }
}

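// Generational post-write barrier: dirty the card covering the updated field, but
// only for stores that create an old->young reference; all other stores are filtered out.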
template <DecoratorSet decorators, typename T>
inline void ShenandoahBarrierSet::write_ref_field_post(T* field) {
  assert(ShenandoahCardBarrier, "Should have been checked by caller");
  if (_heap->is_in_young(field)) {
    // Stores to fields in the young generation do not require a card mark.
    return;
  }
  T heap_oop = RawAccess<>::oop_load(field);
  if (CompressedOops::is_null(heap_oop)) {
    // Null reference stores do not require a card mark.
    return;
  }
  oop obj = CompressedOops::decode_not_null(heap_oop);
  if (!_heap->is_in_young(obj)) {
    // Not an old->young reference store.
    return;
  }
  volatile CardTable::CardValue* byte = card_table()->byte_for(field);
  *byte = CardTable::dirty_card_val();
}

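// Composite load path: raw load, then the load reference barrier, then the SATB
// keep-alive enqueue for non-strong references loaded without AS_NO_KEEPALIVE.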
template <typename T>
inline oop ShenandoahBarrierSet::oop_load(DecoratorSet decorators, T* addr) {
  oop value = RawAccess<>::oop_load(addr);
  value = load_reference_barrier(decorators, value, addr);
  keep_alive_if_weak(decorators, value);
  return value;
}

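// CAS with Shenandoah semantics: the field may legitimately hold a different copy
// (from-space vs. to-space) of compare_value, so retry while the failure witness
// resolves to the same logical object as the expected value.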
template <typename T>
inline oop ShenandoahBarrierSet::oop_cmpxchg(DecoratorSet decorators, T* addr, oop compare_value, oop new_value) {
  oop res;
  oop expected = compare_value;
  do {
    compare_value = expected;
    res = RawAccess<>::oop_atomic_cmpxchg(addr, compare_value, new_value);
    expected = res;
  } while ((compare_value != expected) && (resolve_forwarded(compare_value) == resolve_forwarded(expected)));

  // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,
  // because it must be the previous value.
  res = load_reference_barrier(decorators, res, static_cast<T*>(nullptr));
  satb_enqueue(res);
  return res;
}

template <typename T>
inline oop ShenandoahBarrierSet::oop_xchg(DecoratorSet decorators, T* addr, oop new_value) {
  oop previous = RawAccess<>::oop_atomic_xchg(addr, new_value);
  // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,
  // because it must be the previous value.
  previous = load_reference_barrier<T>(decorators, previous, static_cast<T*>(nullptr));
  satb_enqueue(previous);
  return previous;
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(T* addr) {
  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "must be absent");
  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
  return bs->oop_load(decorators, addr);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap(T* addr) {
  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "must be absent");
  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
  return bs->oop_load(decorators, addr);
}

template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap_at(oop base, ptrdiff_t offset) {
  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
  DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
  return bs->oop_load(resolved_decorators, AccessInternal::oop_field_addr<decorators>(base, offset));
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_common(T* addr, oop value) {
  shenandoah_assert_marked_if(nullptr, value,
                              !CompressedOops::is_null(value) && ShenandoahHeap::heap()->is_evacuation_in_progress()
                              && !(ShenandoahHeap::heap()->active_generation()->is_young()
                                   && ShenandoahHeap::heap()->heap_region_containing(value)->is_old()));
  shenandoah_assert_not_in_cset_if(addr, value, value != nullptr && !ShenandoahHeap::heap()->cancelled_gc());
  ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
  bs->satb_barrier<decorators>(addr);
  Raw::oop_store(addr, value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_not_in_heap(T* addr, oop value) {
  assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
  oop_store_common(addr, value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap(T* addr, oop value) {
  shenandoah_assert_not_in_cset_loc_except(addr, ShenandoahHeap::heap()->cancelled_gc());
  shenandoah_assert_not_forwarded_except(addr, value, value == nullptr || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());

  oop_store_common(addr, value);
  if (ShenandoahCardBarrier) {
    ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
    bs->write_ref_field_post<decorators>(addr);
  }
}

template <DecoratorSet decorators, typename BarrierSetT>
inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap_at(oop base, ptrdiff_t offset, oop value) {
  oop_store_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_not_in_heap(T* addr, oop compare_value, oop new_value) {
  assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  return bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap(T* addr, oop compare_value, oop new_value) {
  assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  oop result = bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
  if (ShenandoahCardBarrier) {
    bs->write_ref_field_post<decorators>(addr);
  }
  return result;
}

template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap_at(oop base, ptrdiff_t offset, oop compare_value, oop new_value) {
  assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
  auto addr = AccessInternal::oop_field_addr<decorators>(base, offset);
  oop result = bs->oop_cmpxchg(resolved_decorators, addr, compare_value, new_value);
  if (ShenandoahCardBarrier) {
    bs->write_ref_field_post<decorators>(addr);
  }
  return result;
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_not_in_heap(T* addr, oop new_value) {
  assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  return bs->oop_xchg(decorators, addr, new_value);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap(T* addr, oop new_value) {
  assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  oop result = bs->oop_xchg(decorators, addr, new_value);
  if (ShenandoahCardBarrier) {
    bs->write_ref_field_post<decorators>(addr);
  }
  return result;
}

template <DecoratorSet decorators, typename BarrierSetT>
inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
  assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
  auto addr = AccessInternal::oop_field_addr<decorators>(base, offset);
  oop result = bs->oop_xchg(resolved_decorators, addr, new_value);
  if (ShenandoahCardBarrier) {
    bs->write_ref_field_post<decorators>(addr);
  }
  return result;
}

// Clone barrier support
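// The closure below walks the fields of the object about to be cloned and, for any
// reference into the collection set, installs (and with EVAC, if needed creates) the
// to-space copy, so that the subsequent bitwise clone copies no stale from-space pointers.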
template <bool EVAC>
class ShenandoahUpdateEvacForCloneOopClosure : public BasicOopIterateClosure {
private:
  ShenandoahHeap* const _heap;
  const ShenandoahCollectionSet* const _cset;
  Thread* const _thread;

  template <class T>
  inline void do_oop_work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (_cset->is_in(obj)) {
        oop fwd = ShenandoahForwarding::get_forwardee(obj);
        if (EVAC && obj == fwd) {
          fwd = _heap->evacuate_object(obj, _thread);
        }
        shenandoah_assert_forwarded_except(p, obj, _heap->cancelled_gc());
        ShenandoahHeap::atomic_update_oop(fwd, p, o);
        obj = fwd;
      }
    }
  }

public:
  ShenandoahUpdateEvacForCloneOopClosure() :
          _heap(ShenandoahHeap::heap()),
          _cset(_heap->collection_set()),
          _thread(Thread::current()) {}

  virtual void do_oop(oop* p) { do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { do_oop_work(p); }
};

void ShenandoahBarrierSet::clone_evacuation(oop obj) {
  assert(_heap->is_evacuation_in_progress(), "only during evacuation");
  if (need_bulk_update(cast_from_oop<HeapWord*>(obj))) {
    ShenandoahUpdateEvacForCloneOopClosure<true> cl;
    obj->oop_iterate(&cl);
  }
}

void ShenandoahBarrierSet::clone_update(oop obj) {
  assert(_heap->is_update_refs_in_progress(), "only during update-refs");
  if (need_bulk_update(cast_from_oop<HeapWord*>(obj))) {
    ShenandoahUpdateEvacForCloneOopClosure<false> cl;
    obj->oop_iterate(&cl);
  }
}

template <DecoratorSet decorators, typename BarrierSetT>
void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t count) {
  // Hot code path, called from compiler/runtime. Make sure fast path is fast.

  // Fix up src before doing the copy, if needed.
  const char gc_state = ShenandoahThreadLocalData::gc_state(Thread::current());
  if (gc_state > 0 && ShenandoahCloneBarrier) {
    ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
    if ((gc_state & ShenandoahHeap::EVACUATION) != 0) {
      bs->clone_evacuation(src);
    } else if ((gc_state & ShenandoahHeap::UPDATE_REFS) != 0) {
      bs->clone_update(src);
    }
  }

  Raw::clone(src, dst, count);

  // Safety: clone destination must be in young, otherwise we need card barriers.
  shenandoah_assert_in_young_if(nullptr, dst, ShenandoahCardBarrier);
}

template <DecoratorSet decorators, typename BarrierSetT>
template <typename T>
OopCopyResult ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                                                                                  arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                                                                                  size_t length) {
  T* src = arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw);
  T* dst = arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw);

  ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
  bs->arraycopy_barrier(src, dst, length);
  OopCopyResult result = Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
  if (ShenandoahCardBarrier) {
    bs->write_ref_array((HeapWord*) dst, length);
  }
  return result;
}

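// Bulk barrier over a run of array elements (or cloned fields). Template flags:
//   HAS_FWD  - the heap may contain forwarded objects, so cset references are resolved;
//   EVAC     - objects in the cset that are not yet forwarded are evacuated here;
//   ENQUEUE  - unmarked objects are enqueued into the SATB queue for concurrent marking.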
template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
  // Young cycles are allowed to run when old marking is in progress. When old marking is in progress,
  // this barrier will be called with ENQUEUE=true and HAS_FWD=false, even though the young generation
  // may have forwarded objects.
  assert(HAS_FWD == _heap->has_forwarded_objects() || _heap->is_concurrent_old_mark_in_progress(), "Forwarded object status is sane");
  // This function cannot be called to handle marking and evacuation at the same time (they operate on
  // different sides of the copy).
  static_assert((HAS_FWD || EVAC) != ENQUEUE, "Cannot evacuate and mark both sides of copy.");

  Thread* thread = Thread::current();
  SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
  ShenandoahMarkingContext* ctx = _heap->marking_context();
  const ShenandoahCollectionSet* const cset = _heap->collection_set();
  T* end = src + count;
  for (T* elem_ptr = src; elem_ptr < end; elem_ptr++) {
    T o = RawAccess<>::oop_load(elem_ptr);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (HAS_FWD && cset->is_in(obj)) {
        oop fwd = resolve_forwarded_not_null(obj);
        if (EVAC && obj == fwd) {
          fwd = _heap->evacuate_object(obj, thread);
        }
        shenandoah_assert_forwarded_except(elem_ptr, obj, _heap->cancelled_gc());
        ShenandoahHeap::atomic_update_oop(fwd, elem_ptr, o);
      }
      if (ENQUEUE && !ctx->is_marked_strong(obj)) {
        _satb_mark_queue_set.enqueue_known_active(queue, obj);
      }
    }
  }
}

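// Dispatch the arraycopy barriers based on the current gc_state: during marking the
// SATB barrier covers the destination (the values about to be overwritten); during
// evacuation/update-refs the source elements are evacuated or updated so that only
// to-space references get copied.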
template <class T>
void ShenandoahBarrierSet::arraycopy_barrier(T* src, T* dst, size_t count) {
  if (count == 0) {
    // No elements to copy, no need for barrier
    return;
  }

  const char gc_state = ShenandoahThreadLocalData::gc_state(Thread::current());
  if ((gc_state & ShenandoahHeap::MARKING) != 0) {
    // If marking old or young, we must evaluate the SATB barrier. This will be the only
    // action if we are not marking old. If we are marking old, we must still evaluate the
    // load reference barrier for a young collection.
    if (_heap->mode()->is_generational()) {
      arraycopy_marking<true>(dst, count);
    } else {
      arraycopy_marking<false>(dst, count);
    }
  }

  if ((gc_state & ShenandoahHeap::EVACUATION) != 0) {
    assert((gc_state & ShenandoahHeap::YOUNG_MARKING) == 0, "Cannot be marking young during evacuation");
    arraycopy_evacuation(src, count);
  } else if ((gc_state & ShenandoahHeap::UPDATE_REFS) != 0) {
    assert((gc_state & ShenandoahHeap::YOUNG_MARKING) == 0, "Cannot be marking young during update-refs");
    arraycopy_update(src, count);
  }
}

template <bool IS_GENERATIONAL, class T>
void ShenandoahBarrierSet::arraycopy_marking(T* dst, size_t count) {
  assert(_heap->is_concurrent_mark_in_progress(), "only during marking");
  if (ShenandoahSATBBarrier) {
    if (!_heap->marking_context()->allocated_after_mark_start(reinterpret_cast<HeapWord*>(dst)) ||
        (IS_GENERATIONAL && _heap->heap_region_containing(dst)->is_old() && _heap->is_concurrent_young_mark_in_progress())) {
      arraycopy_work<T, false, false, true>(dst, count);
    }
  }
}

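// Objects at or above the region's update watermark were allocated after the watermark
// was taken during the current cycle and should not contain stale from-space references;
// only objects below the watermark need the bulk fix-up.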
inline bool ShenandoahBarrierSet::need_bulk_update(HeapWord* ary) {
  return ary < _heap->heap_region_containing(ary)->get_update_watermark();
}

template <class T>
void ShenandoahBarrierSet::arraycopy_evacuation(T* src, size_t count) {
  assert(_heap->is_evacuation_in_progress(), "only during evacuation");
  if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {
    arraycopy_work<T, true, true, false>(src, count);
  }
}

template <class T>
void ShenandoahBarrierSet::arraycopy_update(T* src, size_t count) {
  assert(_heap->is_update_refs_in_progress(), "only during update-refs");
  if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {
    arraycopy_work<T, true, false, false>(src, count);
  }
}

#endif // SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP