
src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp (old version)


  1 /*
  2  * Copyright (c) 2015, 2022, Red Hat, Inc. All rights reserved.

  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
 26 #define SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
 27 
 28 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
 29 
 30 #include "gc/shared/accessBarrierSupport.inline.hpp"

 31 #include "gc/shenandoah/shenandoahAsserts.hpp"
 32 #include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
 33 #include "gc/shenandoah/shenandoahEvacOOMHandler.inline.hpp"
 34 #include "gc/shenandoah/shenandoahForwarding.inline.hpp"

 35 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
 36 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
 37 #include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
 38 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"


 39 #include "oops/oop.inline.hpp"
 40 
 41 inline oop ShenandoahBarrierSet::resolve_forwarded_not_null(oop p) {
 42   return ShenandoahForwarding::get_forwardee(p);
 43 }
 44 
 45 inline oop ShenandoahBarrierSet::resolve_forwarded(oop p) {
 46   if (p != nullptr) {
 47     return resolve_forwarded_not_null(p);
 48   } else {
 49     return p;
 50   }
 51 }
 52 
 53 inline oop ShenandoahBarrierSet::resolve_forwarded_not_null_mutator(oop p) {
 54   return ShenandoahForwarding::get_forwardee_mutator(p);
 55 }
 56 
 57 template <class T>
 58 inline oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, T* load_addr) {

 86     oop fwd = resolve_forwarded_not_null(obj);
 87     if (obj == fwd && _heap->is_evacuation_in_progress()) {
 88       Thread* t = Thread::current();
 89       ShenandoahEvacOOMScope oom_evac_scope(t);
 90       return _heap->evacuate_object(obj, t);
 91     }
 92     return fwd;
 93   }
 94   return obj;
 95 }
 96 
 97 template <class T>
 98 inline oop ShenandoahBarrierSet::load_reference_barrier(DecoratorSet decorators, oop obj, T* load_addr) {
 99   if (obj == nullptr) {
100     return nullptr;
101   }
102 
103   // Prevent resurrection of unreachable phantom (i.e. weak-native) references.
104   if ((decorators & ON_PHANTOM_OOP_REF) != 0 &&
105       _heap->is_concurrent_weak_root_in_progress() &&

106       !_heap->marking_context()->is_marked(obj)) {
107     return nullptr;
108   }
109 
110   // Prevent resurrection of unreachable weak references.
111   if ((decorators & ON_WEAK_OOP_REF) != 0 &&
112       _heap->is_concurrent_weak_root_in_progress() &&

113       !_heap->marking_context()->is_marked_strong(obj)) {
114     return nullptr;
115   }
116 
117   // Prevent resurrection of unreachable objects that are visited during
118   // concurrent class-unloading.
119   if ((decorators & AS_NO_KEEPALIVE) != 0 &&
120       _heap->is_evacuation_in_progress() &&
121       !_heap->marking_context()->is_marked(obj)) {
122     return obj;
123   }
124 
125   oop fwd = load_reference_barrier(obj);
126   if (load_addr != nullptr && fwd != obj) {
127     // Since we are here and we know the load address, update the reference.
128     ShenandoahHeap::atomic_update_oop(fwd, load_addr, obj);
129   }
130 
131   return fwd;
132 }

162   if (value != nullptr && ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
163     enqueue(value);
164   }
165 }
166 
167 inline void ShenandoahBarrierSet::iu_barrier(oop obj) {
168   if (ShenandoahIUBarrier && obj != nullptr && _heap->is_concurrent_mark_in_progress()) {
169     enqueue(obj);
170   }
171 }
172 
173 inline void ShenandoahBarrierSet::keep_alive_if_weak(DecoratorSet decorators, oop value) {
174   assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
175   const bool on_strong_oop_ref = (decorators & ON_STRONG_OOP_REF) != 0;
176   const bool peek              = (decorators & AS_NO_KEEPALIVE) != 0;
177   if (!peek && !on_strong_oop_ref) {
178     satb_enqueue(value);
179   }
180 }
181 
182 template <typename T>
183 inline oop ShenandoahBarrierSet::oop_load(DecoratorSet decorators, T* addr) {
184   oop value = RawAccess<>::oop_load(addr);
185   value = load_reference_barrier(decorators, value, addr);
186   keep_alive_if_weak(decorators, value);
187   return value;
188 }
189 
190 template <typename T>
191 inline oop ShenandoahBarrierSet::oop_cmpxchg(DecoratorSet decorators, T* addr, oop compare_value, oop new_value) {
192   iu_barrier(new_value);
193   oop res;
194   oop expected = compare_value;
195   do {
196     compare_value = expected;
197     res = RawAccess<>::oop_atomic_cmpxchg(addr, compare_value, new_value);
198     expected = res;
199   } while ((compare_value != expected) && (resolve_forwarded(compare_value) == resolve_forwarded(expected)));
200 
201   // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,

225 }
226 
227 template <DecoratorSet decorators, typename BarrierSetT>
228 template <typename T>
229 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap(T* addr) {
230   assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "must be absent");
231   ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
232   return bs->oop_load(decorators, addr);
233 }
234 
235 template <DecoratorSet decorators, typename BarrierSetT>
236 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap_at(oop base, ptrdiff_t offset) {
237   ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
238   DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
239   return bs->oop_load(resolved_decorators, AccessInternal::oop_field_addr<decorators>(base, offset));
240 }
241 
242 template <DecoratorSet decorators, typename BarrierSetT>
243 template <typename T>
244 inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_common(T* addr, oop value) {
245   shenandoah_assert_marked_if(nullptr, value, !CompressedOops::is_null(value) && ShenandoahHeap::heap()->is_evacuation_in_progress());
246   shenandoah_assert_not_in_cset_if(addr, value, value != nullptr && !ShenandoahHeap::heap()->cancelled_gc());
247   ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
248   bs->iu_barrier(value);
249   bs->satb_barrier<decorators>(addr);
250   Raw::oop_store(addr, value);
251 }
252 
253 template <DecoratorSet decorators, typename BarrierSetT>
254 template <typename T>
255 inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_not_in_heap(T* addr, oop value) {
256   oop_store_common(addr, value);
257 }
258 
259 template <DecoratorSet decorators, typename BarrierSetT>
260 template <typename T>
261 inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap(T* addr, oop value) {
262   shenandoah_assert_not_in_cset_loc_except(addr, ShenandoahHeap::heap()->cancelled_gc());
263   shenandoah_assert_not_forwarded_except  (addr, value, value == nullptr || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());
264 
265   oop_store_common(addr, value);
266 }
267 
268 template <DecoratorSet decorators, typename BarrierSetT>
269 inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap_at(oop base, ptrdiff_t offset, oop value) {
270   oop_store_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), value);
271 }
272 
273 template <DecoratorSet decorators, typename BarrierSetT>
274 template <typename T>
275 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_not_in_heap(T* addr, oop compare_value, oop new_value) {
276   assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
277   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
278   return bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
279 }
280 
281 template <DecoratorSet decorators, typename BarrierSetT>
282 template <typename T>
283 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap(T* addr, oop compare_value, oop new_value) {
284   assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
285   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
286   return bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
287 }
288 
289 template <DecoratorSet decorators, typename BarrierSetT>
290 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap_at(oop base, ptrdiff_t offset, oop compare_value, oop new_value) {
291   assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
292   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
293   DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
294   return bs->oop_cmpxchg(resolved_decorators, AccessInternal::oop_field_addr<decorators>(base, offset), compare_value, new_value);
295 }
296 
297 template <DecoratorSet decorators, typename BarrierSetT>
298 template <typename T>
299 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_not_in_heap(T* addr, oop new_value) {
300   assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
301   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
302   return bs->oop_xchg(decorators, addr, new_value);
303 }
304 
305 template <DecoratorSet decorators, typename BarrierSetT>
306 template <typename T>
307 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap(T* addr, oop new_value) {
308   assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
309   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
310   return bs->oop_xchg(decorators, addr, new_value);
311 }
312 
313 template <DecoratorSet decorators, typename BarrierSetT>
314 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
315   assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
316   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
317   DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
318   return bs->oop_xchg(resolved_decorators, AccessInternal::oop_field_addr<decorators>(base, offset), new_value);
319 }
320 
321 // Clone barrier support
322 template <DecoratorSet decorators, typename BarrierSetT>
323 void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {
324   if (ShenandoahCloneBarrier) {
325     ShenandoahBarrierSet::barrier_set()->clone_barrier_runtime(src);
326   }
327   Raw::clone(src, dst, size);
328 }
329 
330 template <DecoratorSet decorators, typename BarrierSetT>
331 template <typename T>
332 bool ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
333                                                                                          arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
334                                                                                          size_t length) {
335   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
336   bs->arraycopy_barrier(arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw),
337                         arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw),
338                         length);
339   return Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);


340 }
341 
342 template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
343 void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
344   assert(HAS_FWD == _heap->has_forwarded_objects(), "Forwarded object status is sane");


345 
346   Thread* thread = Thread::current();
347   SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
348   ShenandoahMarkingContext* ctx = _heap->marking_context();
349   const ShenandoahCollectionSet* const cset = _heap->collection_set();
350   T* end = src + count;
351   for (T* elem_ptr = src; elem_ptr < end; elem_ptr++) {
352     T o = RawAccess<>::oop_load(elem_ptr);
353     if (!CompressedOops::is_null(o)) {
354       oop obj = CompressedOops::decode_not_null(o);
355       if (HAS_FWD && cset->is_in(obj)) {
356         oop fwd = resolve_forwarded_not_null(obj);
357         if (EVAC && obj == fwd) {
358           fwd = _heap->evacuate_object(obj, thread);
359         }
360         shenandoah_assert_forwarded_except(elem_ptr, obj, _heap->cancelled_gc());
361         ShenandoahHeap::atomic_update_oop(fwd, elem_ptr, o);
362         obj = fwd;
363       }
364       if (ENQUEUE && !ctx->is_marked_strong(obj)) {
365         _satb_mark_queue_set.enqueue_known_active(queue, obj);
366       }
367     }
368   }
369 }
370 
371 template <class T>
372 void ShenandoahBarrierSet::arraycopy_barrier(T* src, T* dst, size_t count) {
373   if (count == 0) {
374     return;
375   }
376   int gc_state = _heap->gc_state();
377   if ((gc_state & ShenandoahHeap::MARKING) != 0) {
378     arraycopy_marking(src, dst, count);
379   } else if ((gc_state & ShenandoahHeap::EVACUATION) != 0) {
380     arraycopy_evacuation(src, count);
381   } else if ((gc_state & ShenandoahHeap::UPDATEREFS) != 0) {
382     arraycopy_update(src, count);
383   }
384 }
385 
386 template <class T>
387 void ShenandoahBarrierSet::arraycopy_marking(T* src, T* dst, size_t count) {
388   assert(_heap->is_concurrent_mark_in_progress(), "only during marking");
389   T* array = ShenandoahSATBBarrier ? dst : src;
390   if (!_heap->marking_context()->allocated_after_mark_start(reinterpret_cast<HeapWord*>(array))) {
391     arraycopy_work<T, false, false, true>(array, count);
392   }
393 }
394 
395 inline bool ShenandoahBarrierSet::need_bulk_update(HeapWord* ary) {
396   return ary < _heap->heap_region_containing(ary)->get_update_watermark();
397 }
398 
399 template <class T>
400 void ShenandoahBarrierSet::arraycopy_evacuation(T* src, size_t count) {
401   assert(_heap->is_evacuation_in_progress(), "only during evacuation");
402   if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {
403     ShenandoahEvacOOMScope oom_evac;
404     arraycopy_work<T, true, true, false>(src, count);
405   }
406 }
407 
408 template <class T>
409 void ShenandoahBarrierSet::arraycopy_update(T* src, size_t count) {
410   assert(_heap->is_update_refs_in_progress(), "only during update-refs");
411   if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {

src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp (new version)

  1 /*
  2  * Copyright (c) 2015, 2022, Red Hat, Inc. All rights reserved.
  3  * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #ifndef SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
 27 #define SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
 28 
 29 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
 30 
 31 #include "gc/shared/accessBarrierSupport.inline.hpp"
 32 #include "gc/shared/cardTable.hpp"
 33 #include "gc/shenandoah/shenandoahAsserts.hpp"
 34 #include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
 35 #include "gc/shenandoah/shenandoahEvacOOMHandler.inline.hpp"
 36 #include "gc/shenandoah/shenandoahForwarding.inline.hpp"
 37 #include "gc/shenandoah/shenandoahGeneration.hpp"
 38 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
 39 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
 40 #include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
 41 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
 42 #include "gc/shenandoah/mode/shenandoahMode.hpp"
 43 #include "memory/iterator.inline.hpp"
 44 #include "oops/oop.inline.hpp"
 45 
 46 inline oop ShenandoahBarrierSet::resolve_forwarded_not_null(oop p) {
 47   return ShenandoahForwarding::get_forwardee(p);
 48 }
 49 
 50 inline oop ShenandoahBarrierSet::resolve_forwarded(oop p) {
 51   if (p != nullptr) {
 52     return resolve_forwarded_not_null(p);
 53   } else {
 54     return p;
 55   }
 56 }
 57 
 58 inline oop ShenandoahBarrierSet::resolve_forwarded_not_null_mutator(oop p) {
 59   return ShenandoahForwarding::get_forwardee_mutator(p);
 60 }
 61 
 62 template <class T>
 63 inline oop ShenandoahBarrierSet::load_reference_barrier_mutator(oop obj, T* load_addr) {

 91     oop fwd = resolve_forwarded_not_null(obj);
 92     if (obj == fwd && _heap->is_evacuation_in_progress()) {
 93       Thread* t = Thread::current();
 94       ShenandoahEvacOOMScope oom_evac_scope(t);
 95       return _heap->evacuate_object(obj, t);
 96     }
 97     return fwd;
 98   }
 99   return obj;
100 }
101 
102 template <class T>
103 inline oop ShenandoahBarrierSet::load_reference_barrier(DecoratorSet decorators, oop obj, T* load_addr) {
104   if (obj == nullptr) {
105     return nullptr;
106   }
107 
108   // Prevent resurrection of unreachable phantom (i.e. weak-native) references.
109   if ((decorators & ON_PHANTOM_OOP_REF) != 0 &&
110       _heap->is_concurrent_weak_root_in_progress() &&
111       _heap->is_in_active_generation(obj) &&
112       !_heap->marking_context()->is_marked(obj)) {
113     return nullptr;
114   }
115 
116   // Prevent resurrection of unreachable weak references.
117   if ((decorators & ON_WEAK_OOP_REF) != 0 &&
118       _heap->is_concurrent_weak_root_in_progress() &&
119       _heap->is_in_active_generation(obj) &&
120       !_heap->marking_context()->is_marked_strong(obj)) {
121     return nullptr;
122   }
123 
124   // Prevent resurrection of unreachable objects that are visited during
125   // concurrent class-unloading.
126   if ((decorators & AS_NO_KEEPALIVE) != 0 &&
127       _heap->is_evacuation_in_progress() &&
128       !_heap->marking_context()->is_marked(obj)) {
129     return obj;
130   }
131 
132   oop fwd = load_reference_barrier(obj);
133   if (load_addr != nullptr && fwd != obj) {
134     // Since we are here and we know the load address, update the reference.
135     ShenandoahHeap::atomic_update_oop(fwd, load_addr, obj);
136   }
137 
138   return fwd;
139 }
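
The tail of load_reference_barrier above is the self-healing step: once the to-space copy is known, the slot that produced the stale reference is CAS-updated so later loads skip the slow path. A minimal standalone sketch of that idea follows; the types and names are illustrative, not HotSpot code.

    #include <atomic>

    struct Slot { std::atomic<void*> value; };

    // Replace 'seen' with 'forwardee' only if the slot still holds exactly the
    // value we loaded; if a racing thread already updated it, keep their value.
    inline void* heal_after_load(Slot& s, void* seen, void* forwardee) {
      if (forwardee != seen) {
        s.value.compare_exchange_strong(seen, forwardee);
      }
      return forwardee;
    }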

169   if (value != nullptr && ShenandoahSATBBarrier && _heap->is_concurrent_mark_in_progress()) {
170     enqueue(value);
171   }
172 }
173 
174 inline void ShenandoahBarrierSet::iu_barrier(oop obj) {
175   if (ShenandoahIUBarrier && obj != nullptr && _heap->is_concurrent_mark_in_progress()) {
176     enqueue(obj);
177   }
178 }
179 
180 inline void ShenandoahBarrierSet::keep_alive_if_weak(DecoratorSet decorators, oop value) {
181   assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Reference strength must be known");
182   const bool on_strong_oop_ref = (decorators & ON_STRONG_OOP_REF) != 0;
183   const bool peek              = (decorators & AS_NO_KEEPALIVE) != 0;
184   if (!peek && !on_strong_oop_ref) {
185     satb_enqueue(value);
186   }
187 }
188 
189 template <DecoratorSet decorators, typename T>
190 inline void ShenandoahBarrierSet::write_ref_field_post(T* field) {
191   assert(ShenandoahCardBarrier, "Did you mean to enable ShenandoahCardBarrier?");
192   volatile CardTable::CardValue* byte = card_table()->byte_for(field);
193   *byte = CardTable::dirty_card_val();
194 }
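
write_ref_field_post is the generational post-write barrier: it dirties the card that covers the updated field so the next remembered-set scan revisits it. A standalone model of the address-to-card mapping follows, assuming a 512-byte card size and a dirty value of 0; the real mapping is encapsulated by CardTable::byte_for and dirty_card_val.

    #include <cstddef>
    #include <cstdint>

    constexpr size_t  kCardShift = 9;   // assumed 2^9 = 512-byte cards
    constexpr uint8_t kDirtyCard = 0;   // assumed encoding for "dirty"

    // One byte per card: index the card array by (field address / card size).
    inline void dirty_card_for(uint8_t* byte_map_base, const void* field) {
      byte_map_base[reinterpret_cast<uintptr_t>(field) >> kCardShift] = kDirtyCard;
    }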
195 
196 template <typename T>
197 inline oop ShenandoahBarrierSet::oop_load(DecoratorSet decorators, T* addr) {
198   oop value = RawAccess<>::oop_load(addr);
199   value = load_reference_barrier(decorators, value, addr);
200   keep_alive_if_weak(decorators, value);
201   return value;
202 }
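
For orientation, a hedged sketch of how a load reaches oop_load through the shared Access API; the holder, offset and wrapper function below are hypothetical.

    #include "oops/access.hpp"

    inline oop load_weak_field_example(oop holder, ptrdiff_t offset) {
      // Dispatches to AccessBarrier<...>::oop_load_in_heap_at below, which resolves
      // the reference strength and lands in ShenandoahBarrierSet::oop_load:
      // raw load, then load-reference barrier, then SATB keep-alive if non-strong.
      return HeapAccess<ON_WEAK_OOP_REF>::oop_load_at(holder, offset);
    }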
203 
204 template <typename T>
205 inline oop ShenandoahBarrierSet::oop_cmpxchg(DecoratorSet decorators, T* addr, oop compare_value, oop new_value) {
206   iu_barrier(new_value);
207   oop res;
208   oop expected = compare_value;
209   do {
210     compare_value = expected;
211     res = RawAccess<>::oop_atomic_cmpxchg(addr, compare_value, new_value);
212     expected = res;
213   } while ((compare_value != expected) && (resolve_forwarded(compare_value) == resolve_forwarded(expected)));
214 
215   // Note: We don't need a keep-alive-barrier here. We already enqueue any loaded reference for SATB anyway,

239 }
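
The loop in oop_cmpxchg retries a CAS whose failure may be only apparent: the field can hold the to-space copy while the caller passed the from-space copy of the same object, or vice versa. A standalone model of the retry rule, with illustrative types:

    #include <atomic>

    struct Node { Node* fwd; };   // fwd points to self when not forwarded
    inline Node* resolve(Node* p) { return p == nullptr ? nullptr : p->fwd; }

    inline Node* cas_oop_model(std::atomic<Node*>& field, Node* compare, Node* new_value) {
      Node* expected = compare;
      Node* found;
      do {
        compare = expected;
        found = compare;                                  // receives the current value on failure
        field.compare_exchange_strong(found, new_value);
        expected = found;
        // Retry only if the CAS failed but both values name the same logical object.
      } while (compare != expected && resolve(compare) == resolve(expected));
      return found;
    }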
240 
241 template <DecoratorSet decorators, typename BarrierSetT>
242 template <typename T>
243 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap(T* addr) {
244   assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "must be absent");
245   ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
246   return bs->oop_load(decorators, addr);
247 }
248 
249 template <DecoratorSet decorators, typename BarrierSetT>
250 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap_at(oop base, ptrdiff_t offset) {
251   ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
252   DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
253   return bs->oop_load(resolved_decorators, AccessInternal::oop_field_addr<decorators>(base, offset));
254 }
255 
256 template <DecoratorSet decorators, typename BarrierSetT>
257 template <typename T>
258 inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_common(T* addr, oop value) {
259   shenandoah_assert_marked_if(nullptr, value,
260                               !CompressedOops::is_null(value) &&
261                               ShenandoahHeap::heap()->is_evacuation_in_progress() &&
262                               !(ShenandoahHeap::heap()->active_generation()->is_young() &&
263                               ShenandoahHeap::heap()->heap_region_containing(value)->is_old()));
264   shenandoah_assert_not_in_cset_if(addr, value, value != nullptr && !ShenandoahHeap::heap()->cancelled_gc());
265   ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
266   bs->iu_barrier(value);
267   bs->satb_barrier<decorators>(addr);
268   Raw::oop_store(addr, value);
269 }
270 
271 template <DecoratorSet decorators, typename BarrierSetT>
272 template <typename T>
273 inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_not_in_heap(T* addr, oop value) {
274   oop_store_common(addr, value);
275 }
276 
277 template <DecoratorSet decorators, typename BarrierSetT>
278 template <typename T>
279 inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap(T* addr, oop value) {
280   shenandoah_assert_not_in_cset_loc_except(addr, ShenandoahHeap::heap()->cancelled_gc());
281   shenandoah_assert_not_forwarded_except  (addr, value, value == nullptr || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());
282 
283   oop_store_common(addr, value);
284   if (ShenandoahCardBarrier) {
285     ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
286     bs->write_ref_field_post<decorators>(addr);
287   }
288 }
289 
290 template <DecoratorSet decorators, typename BarrierSetT>
291 inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap_at(oop base, ptrdiff_t offset, oop value) {
292   oop_store_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), value);
293 }
294 
295 template <DecoratorSet decorators, typename BarrierSetT>
296 template <typename T>
297 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_not_in_heap(T* addr, oop compare_value, oop new_value) {
298   assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
299   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
300   return bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
301 }
302 
303 template <DecoratorSet decorators, typename BarrierSetT>
304 template <typename T>
305 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap(T* addr, oop compare_value, oop new_value) {
306   assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
307   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
308   oop result = bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
309   if (ShenandoahCardBarrier) {
310     bs->write_ref_field_post<decorators>(addr);
311   }
312   return result;
313 }
314 
315 template <DecoratorSet decorators, typename BarrierSetT>
316 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap_at(oop base, ptrdiff_t offset, oop compare_value, oop new_value) {
317   assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
318   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
319   DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
320   auto addr = AccessInternal::oop_field_addr<decorators>(base, offset);
321   oop result = bs->oop_cmpxchg(resolved_decorators, addr, compare_value, new_value);
322   if (ShenandoahCardBarrier) {
323     bs->write_ref_field_post<decorators>(addr);
324   }
325   return result;
326 }
327 
328 template <DecoratorSet decorators, typename BarrierSetT>
329 template <typename T>
330 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_not_in_heap(T* addr, oop new_value) {
331   assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
332   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
333   return bs->oop_xchg(decorators, addr, new_value);
334 }
335 
336 template <DecoratorSet decorators, typename BarrierSetT>
337 template <typename T>
338 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap(T* addr, oop new_value) {
339   assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
340   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
341   oop result = bs->oop_xchg(decorators, addr, new_value);
342   if (ShenandoahCardBarrier) {
343     bs->write_ref_field_post<decorators>(addr);
344   }
345   return result;
346 }
347 
348 template <DecoratorSet decorators, typename BarrierSetT>
349 inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
350   assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
351   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
352   DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
353   auto addr = AccessInternal::oop_field_addr<decorators>(base, offset);
354   oop result = bs->oop_xchg(resolved_decorators, addr, new_value);
355   if (ShenandoahCardBarrier) {
356     bs->write_ref_field_post<decorators>(addr);
357   }
358   return result;
359 }
360 
361 // Clone barrier support
362 template <DecoratorSet decorators, typename BarrierSetT>
363 void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {
364   if (ShenandoahCloneBarrier) {
365     ShenandoahBarrierSet::barrier_set()->clone_barrier_runtime(src);
366   }
367   Raw::clone(src, dst, size);
368 }
369 
370 template <DecoratorSet decorators, typename BarrierSetT>
371 template <typename T>
372 bool ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
373                                                                                          arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
374                                                                                          size_t length) {
375   T* src = arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw);
376   T* dst = arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw);
377 
378   ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
379   bs->arraycopy_barrier(src, dst, length);
380   bool result = Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
381   if (ShenandoahCardBarrier) {
382     bs->write_ref_array((HeapWord*) dst, length);
383   }
384   return result;
385 }
386 
387 template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
388 void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
389   // We allow forwarding in young generation and marking in old generation
390   // to happen simultaneously.
391   assert(_heap->mode()->is_generational() || HAS_FWD == _heap->has_forwarded_objects(), "Forwarded object status is sane");
392 
393   Thread* thread = Thread::current();
394   SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
395   ShenandoahMarkingContext* ctx = _heap->marking_context();
396   const ShenandoahCollectionSet* const cset = _heap->collection_set();
397   T* end = src + count;
398   for (T* elem_ptr = src; elem_ptr < end; elem_ptr++) {
399     T o = RawAccess<>::oop_load(elem_ptr);
400     if (!CompressedOops::is_null(o)) {
401       oop obj = CompressedOops::decode_not_null(o);
402       if (HAS_FWD && cset->is_in(obj)) {
403         oop fwd = resolve_forwarded_not_null(obj);
404         if (EVAC && obj == fwd) {
405           fwd = _heap->evacuate_object(obj, thread);
406         }
407         shenandoah_assert_forwarded_except(elem_ptr, obj, _heap->cancelled_gc());
408         ShenandoahHeap::atomic_update_oop(fwd, elem_ptr, o);
409         obj = fwd;
410       }
411       if (ENQUEUE && !ctx->is_marked_strong_or_old(obj)) {
412         _satb_mark_queue_set.enqueue_known_active(queue, obj);
413       }
414     }
415   }
416 }
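
A short, hedged note on the template flags for readers of the callers below; the third instantiation is inferred because its call site is truncated in this listing.

    // <HAS_FWD, EVAC, ENQUEUE> instantiations in this file:
    //   arraycopy_marking    -> arraycopy_work<T, false, false, true>   SATB-enqueue overwritten elements
    //   arraycopy_evacuation -> arraycopy_work<T, true,  true,  false>  evacuate cset objects, fix slots
    //   arraycopy_update     -> fixes slots without evacuating (call site not shown here)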
417 
418 template <class T>
419 void ShenandoahBarrierSet::arraycopy_barrier(T* src, T* dst, size_t count) {
420   if (count == 0) {
421     return;
422   }
423   int gc_state = _heap->gc_state();
424   if ((gc_state & ShenandoahHeap::YOUNG_MARKING) != 0) {
425     arraycopy_marking(src, dst, count, false);
426     return;
427   }
428 
429   if ((gc_state & ShenandoahHeap::EVACUATION) != 0) {
430     arraycopy_evacuation(src, count);
431   } else if ((gc_state & ShenandoahHeap::UPDATEREFS) != 0) {
432     arraycopy_update(src, count);
433   }
434 
435   if (_heap->mode()->is_generational()) {
436     assert(ShenandoahSATBBarrier, "Generational mode assumes SATB mode");
437     // TODO: Could we optimize here by checking that dst is in an old region?
438     if ((gc_state & ShenandoahHeap::OLD_MARKING) != 0) {
439       // Note that we can't do the arraycopy marking using the 'src' array when
440       // SATB mode is enabled (so we can't do this as part of the iteration for
441       // evacuation or update references).
442       arraycopy_marking(src, dst, count, true);
443     }
444   }
445 }
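
The control flow above is easy to misread: young marking returns early, while old marking runs in addition to evacuation or update-refs. A standalone model of the dispatch, with illustrative flag values; HotSpot defines the real bits in shenandoahHeap.hpp.

    enum GCStateModel : int {
      kYoungMarking = 1 << 0,
      kOldMarking   = 1 << 1,
      kEvacuation   = 1 << 2,
      kUpdateRefs   = 1 << 3
    };

    inline void arraycopy_barrier_model(int gc_state, bool generational) {
      if (gc_state & kYoungMarking) {
        // SATB pre-copy over dst; nothing else runs during young marking.
        return;
      }
      if (gc_state & kEvacuation) {
        // Evacuate cset objects referenced from src and fix the copied slots.
      } else if (gc_state & kUpdateRefs) {
        // Fix stale slots in src without evacuating.
      }
      if (generational && (gc_state & kOldMarking)) {
        // SATB pre-copy over dst, even while evacuation or update-refs is running.
      }
    }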
446 
447 template <class T>
448 void ShenandoahBarrierSet::arraycopy_marking(T* src, T* dst, size_t count, bool is_old_marking) {
449   assert(_heap->is_concurrent_mark_in_progress(), "only during marking");
450   /*
451    * Note that an old-gen object is considered live if it is live at the start of OLD marking or if it is promoted
452    * following the start of OLD marking.
453    *
454    * 1. Every object promoted following the start of OLD marking will be above TAMS within its old-gen region
455    * 2. Every object live at the start of OLD marking will be referenced from a "root" or it will be referenced from
456    *    another live OLD-gen object.  With regard to old-gen, roots include stack locations and all of live young-gen.
457    *    All root references to old-gen are identified during a bootstrap young collection.  All references from other
458    *    old-gen objects will be marked during the traversal of all old objects, or will be marked by the SATB barrier.
459    *
460    * During old-gen marking (which is interleaved with young-gen collections), call arraycopy_work() if:
461    *
462    * 1. The overwritten array resides in old-gen and is below TAMS within its old-gen region
463    * 2. arraycopy_work() is never called for an array residing in young-gen, because young-gen collection is idle at this time
464    *
465    * During young-gen marking, call arraycopy_work() if:
466    *
467    * 1. The overwritten array resides in young-gen and is below TAMS within its young-gen region
468    * 2. Additionally, if the array resides in old-gen, regardless of its relationship to TAMS, because this old-gen array
469    *    may hold references to young-gen
470    */
471   if (ShenandoahSATBBarrier) {
472     T* array = dst;
473     HeapWord* array_addr = reinterpret_cast<HeapWord*>(array);
474     ShenandoahHeapRegion* r = _heap->heap_region_containing(array_addr);
475     if (is_old_marking) {
476       // Generational, old marking
477       assert(_heap->mode()->is_generational(), "Invariant");
478       if (r->is_old() && (array_addr < _heap->marking_context()->top_at_mark_start(r))) {
479         arraycopy_work<T, false, false, true>(array, count);
480       }
481     } else if (_heap->mode()->is_generational()) {
482       // Generational, young marking
483       if (r->is_old() || (array_addr < _heap->marking_context()->top_at_mark_start(r))) {
484         arraycopy_work<T, false, false, true>(array, count);
485       }
486     } else if (array_addr < _heap->marking_context()->top_at_mark_start(r)) {
487       // Non-generational, marking
488       arraycopy_work<T, false, false, true>(array, count);
489     }
490   } else {
491     // Incremental Update mode, marking
492     T* array = src;
493     HeapWord* array_addr = reinterpret_cast<HeapWord*>(array);
494     ShenandoahHeapRegion* r = _heap->heap_region_containing(array_addr);
495     if (array_addr < _heap->marking_context()->top_at_mark_start(r)) {
496       arraycopy_work<T, false, false, true>(array, count);
497     }
498   }
499 }
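
The branching above reduces to a small predicate over TAMS (top-at-mark-start) and the region's generation. A standalone restatement with illustrative names; in HotSpot the test uses marking_context()->top_at_mark_start(r) on the array chosen above (dst for SATB, src for IU).

    struct RegionModel {
      const char* tams;      // top-at-mark-start for the current marking cycle
      bool        is_old;
    };

    // True when the chosen array needs the SATB pre-copy (arraycopy_work).
    // old_marking implies generational mode.
    inline bool needs_satb_precopy(const RegionModel& r, const char* array_addr,
                                   bool generational, bool old_marking) {
      if (old_marking) {
        // Young collection is idle; only old arrays below TAMS matter.
        return r.is_old && array_addr < r.tams;
      }
      if (generational) {
        // Old arrays may hold young references regardless of TAMS.
        return r.is_old || array_addr < r.tams;
      }
      return array_addr < r.tams;  // single-generation marking (SATB or IU)
    }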
500 
501 inline bool ShenandoahBarrierSet::need_bulk_update(HeapWord* ary) {
502   return ary < _heap->heap_region_containing(ary)->get_update_watermark();
503 }
504 
505 template <class T>
506 void ShenandoahBarrierSet::arraycopy_evacuation(T* src, size_t count) {
507   assert(_heap->is_evacuation_in_progress(), "only during evacuation");
508   if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {
509     ShenandoahEvacOOMScope oom_evac;
510     arraycopy_work<T, true, true, false>(src, count);
511   }
512 }
513 
514 template <class T>
515 void ShenandoahBarrierSet::arraycopy_update(T* src, size_t count) {
516   assert(_heap->is_update_refs_in_progress(), "only during update-refs");
517   if (need_bulk_update(reinterpret_cast<HeapWord*>(src))) {