src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.inline.hpp

@@ -26,18 +26,21 @@
  #define SHARE_GC_SHENANDOAH_SHENANDOAHBARRIERSET_INLINE_HPP
  
  #include "gc/shenandoah/shenandoahBarrierSet.hpp"
  
  #include "gc/shared/accessBarrierSupport.inline.hpp"
+ #include "gc/shared/cardTable.hpp"
  #include "gc/shenandoah/shenandoahAsserts.hpp"
  #include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
  #include "gc/shenandoah/shenandoahEvacOOMHandler.inline.hpp"
  #include "gc/shenandoah/shenandoahForwarding.inline.hpp"
  #include "gc/shenandoah/shenandoahHeap.inline.hpp"
  #include "gc/shenandoah/shenandoahHeapRegion.hpp"
  #include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
  #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
+ #include "gc/shenandoah/mode/shenandoahMode.hpp"
+ #include "memory/iterator.inline.hpp"
  #include "oops/oop.inline.hpp"
  
  inline oop ShenandoahBarrierSet::resolve_forwarded_not_null(oop p) {
    return ShenandoahForwarding::get_forwardee(p);
  }

@@ -106,17 +109,19 @@
    }
  
    // Prevent resurrection of unreachable phantom (i.e. weak-native) references.
    if ((decorators & ON_PHANTOM_OOP_REF) != 0 &&
        _heap->is_concurrent_weak_root_in_progress() &&
+       _heap->is_in_active_generation(obj) &&
        !_heap->marking_context()->is_marked(obj)) {
      return NULL;
    }
  
    // Prevent resurrection of unreachable weak references.
    if ((decorators & ON_WEAK_OOP_REF) != 0 &&
        _heap->is_concurrent_weak_root_in_progress() &&
+       _heap->is_in_active_generation(obj) &&
        !_heap->marking_context()->is_marked_strong(obj)) {
      return NULL;
    }
  
    // Prevent resurrection of unreachable objects that are visited during

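The two added is_in_active_generation(obj) checks confine the "unmarked means unreachable" conclusion to the generation whose marking bitmap is current in this cycle; a referent in the inactive generation has no up-to-date mark information and must not be filtered to NULL. A minimal standalone sketch of the combined condition (parameter names are illustrative stand-ins for the heap queries, not HotSpot code):

// Sketch: a weak/phantom load is hidden (returns NULL) only when all three
// conditions hold; otherwise the referent is passed through untouched.
inline bool may_hide_weak_referent(bool weak_roots_in_progress,
                                   bool referent_in_active_generation,
                                   bool referent_unmarked) {
  return weak_roots_in_progress &&
         referent_in_active_generation &&
         referent_unmarked;
}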
@@ -182,10 +187,18 @@
    if (!peek && !on_strong_oop_ref) {
      satb_enqueue(value);
    }
  }
  
+ template <DecoratorSet decorators, typename T>
+ inline void ShenandoahBarrierSet::write_ref_field_post(T* field, oop newVal) {
+   if (ShenandoahHeap::heap()->mode()->is_generational()) {
+     volatile CardTable::CardValue* byte = card_table()->byte_for(field);
+     *byte = CardTable::dirty_card_val();
+   }
+ }
+ 
  template <typename T>
  inline oop ShenandoahBarrierSet::oop_load(DecoratorSet decorators, T* addr) {
    oop value = RawAccess<>::oop_load(addr);
    value = load_reference_barrier(decorators, value, addr);
    keep_alive_if_weak(decorators, value);

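The new write_ref_field_post is the generational post-write barrier: after a reference field has been updated, the card-table byte covering that field is set to dirty so the remembered-set scan can later find potential old-to-young pointers without walking the whole old generation. The sketch below is a standalone toy model of that bookkeeping, not HotSpot's CardTable; the 512-byte card size and the member names are illustrative assumptions.

#include <cstdint>

// Toy card table: one byte per fixed-size "card" of the covered heap range.
struct ToyCardTable {
  static const int     card_shift = 9;     // 512-byte cards (assumption)
  static const uint8_t clean_card = 0xff;
  static const uint8_t dirty_card = 0x00;

  uint8_t*  bytes;        // one entry per card
  uintptr_t heap_start;   // lowest covered heap address

  uint8_t* byte_for(const void* field) const {
    return &bytes[((uintptr_t)field - heap_start) >> card_shift];
  }

  // Counterpart of write_ref_field_post: dirty the card covering the field.
  void post_write(const void* field) {
    *byte_for(field) = dirty_card;
  }
};

Remembered-set scanning then only has to revisit the heap words covered by dirty entries, which is why dirtying a card is always safe even when the stored value turns out to be uninteresting.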
@@ -245,11 +258,12 @@
  }
  
  template <DecoratorSet decorators, typename BarrierSetT>
  template <typename T>
  inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_not_in_heap(T* addr, oop value) {
-   shenandoah_assert_marked_if(NULL, value, !CompressedOops::is_null(value) && ShenandoahHeap::heap()->is_evacuation_in_progress());
+   shenandoah_assert_marked_if(NULL, value, !CompressedOops::is_null(value) && ShenandoahHeap::heap()->is_evacuation_in_progress() &&
+                               !(ShenandoahHeap::heap()->is_gc_generation_young() && ShenandoahHeap::heap()->heap_region_containing(value)->is_old()));
    shenandoah_assert_not_in_cset_if(addr, value, value != NULL && !ShenandoahHeap::heap()->cancelled_gc());
    ShenandoahBarrierSet* const bs = ShenandoahBarrierSet::barrier_set();
    bs->iu_barrier(value);
    bs->satb_barrier<decorators>(addr);
    Raw::oop_store(addr, value);

@@ -261,10 +275,11 @@
    shenandoah_assert_not_in_cset_loc_except(addr, ShenandoahHeap::heap()->cancelled_gc());
    shenandoah_assert_not_forwarded_except  (addr, value, value == NULL || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());
    shenandoah_assert_not_in_cset_except    (addr, value, value == NULL || ShenandoahHeap::heap()->cancelled_gc() || !ShenandoahHeap::heap()->is_concurrent_mark_in_progress());
  
    oop_store_not_in_heap(addr, value);
+   ShenandoahBarrierSet::barrier_set()->write_ref_field_post<decorators>(addr, value);
  }
  
  template <DecoratorSet decorators, typename BarrierSetT>
  inline void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap_at(oop base, ptrdiff_t offset, oop value) {
    oop_store_in_heap(AccessInternal::oop_field_addr<decorators>(base, offset), value);

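Two changes in the store paths: the marked-value assertion is relaxed so that, during a young collection, storing a reference to an object in an old region does not require that object to be marked (old regions are not being traced in that cycle), and the in-heap store now finishes with the generational post barrier after the raw store. A standalone sketch of the relaxed predicate, with illustrative parameters in place of the HotSpot queries:

#include <cassert>

// Only demand a mark bit when evacuation is in progress AND the stored value
// is not exempt as "object in an old region during a young GC".
inline void check_store_value_marked(bool value_is_null, bool evac_in_progress,
                                     bool young_gc, bool value_in_old_region,
                                     bool value_is_marked) {
  bool must_be_marked = !value_is_null && evac_in_progress &&
                        !(young_gc && value_in_old_region);
  assert(!must_be_marked || value_is_marked);
  (void)must_be_marked; (void)value_is_marked;  // silence release-build warnings
}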
@@ -281,19 +296,24 @@
  template <DecoratorSet decorators, typename BarrierSetT>
  template <typename T>
  inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap(T* addr, oop compare_value, oop new_value) {
    assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
    ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
-   return bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
+   oop result = bs->oop_cmpxchg(decorators, addr, compare_value, new_value);
+   bs->write_ref_field_post<decorators>(addr, new_value);
+   return result;
  }
  
  template <DecoratorSet decorators, typename BarrierSetT>
  inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap_at(oop base, ptrdiff_t offset, oop compare_value, oop new_value) {
    assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
    ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
    DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
-   return bs->oop_cmpxchg(resolved_decorators, AccessInternal::oop_field_addr<decorators>(base, offset), compare_value, new_value);
+   auto addr = AccessInternal::oop_field_addr<decorators>(base, offset);
+   oop result = bs->oop_cmpxchg(resolved_decorators, addr, compare_value, new_value);
+   bs->write_ref_field_post<decorators>(addr, new_value);
+   return result;
  }
  
  template <DecoratorSet decorators, typename BarrierSetT>
  template <typename T>
  inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_not_in_heap(T* addr, oop new_value) {

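Both cmpxchg paths now compute the destination address once, perform the atomic exchange, and then run the post barrier with the new value; the same pattern is applied to the xchg variants in the next hunk. The card is dirtied even when the compare-and-exchange fails, which is merely conservative: a dirty card only causes its covered words to be rescanned. A standalone sketch of the pattern (toy types, not the HotSpot access machinery):

#include <atomic>

// "Atomic update, then unconditionally dirty the covering card." The
// dirty_card_for callback stands in for write_ref_field_post (assumption).
template <typename T, typename PostBarrier>
T cas_with_post_barrier(std::atomic<T>* field, T expected, T new_value,
                        PostBarrier dirty_card_for) {
  T witness = expected;
  field->compare_exchange_strong(witness, new_value);
  dirty_card_for(field);   // harmless on failure, required on success
  return witness;          // value observed at the field before the exchange
}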
@@ -305,19 +325,24 @@
  template <DecoratorSet decorators, typename BarrierSetT>
  template <typename T>
  inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap(T* addr, oop new_value) {
    assert((decorators & (AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF)) == 0, "must be absent");
    ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
-   return bs->oop_xchg(decorators, addr, new_value);
+   oop result = bs->oop_xchg(decorators, addr, new_value);
+   bs->write_ref_field_post<decorators>(addr, new_value);
+   return result;
  }
  
  template <DecoratorSet decorators, typename BarrierSetT>
  inline oop ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
    assert((decorators & AS_NO_KEEPALIVE) == 0, "must be absent");
    ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
    DecoratorSet resolved_decorators = AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);
-   return bs->oop_xchg(resolved_decorators, AccessInternal::oop_field_addr<decorators>(base, offset), new_value);
+   auto addr = AccessInternal::oop_field_addr<decorators>(base, offset);
+   oop result = bs->oop_xchg(resolved_decorators, addr, new_value);
+   bs->write_ref_field_post<decorators>(addr, new_value);
+   return result;
  }
  
  // Clone barrier support
  template <DecoratorSet decorators, typename BarrierSetT>
  void ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {

@@ -330,20 +355,25 @@
  template <DecoratorSet decorators, typename BarrierSetT>
  template <typename T>
  bool ShenandoahBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                                                                           arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                                                                           size_t length) {
+   T* src = arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw);
+   T* dst = arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw);
+ 
    ShenandoahBarrierSet* bs = ShenandoahBarrierSet::barrier_set();
-   bs->arraycopy_barrier(arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw),
-                         arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw),
-                         length);
-   return Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
+   bs->arraycopy_barrier(src, dst, length);
+   bool result = Raw::oop_arraycopy_in_heap(src_obj, src_offset_in_bytes, src_raw, dst_obj, dst_offset_in_bytes, dst_raw, length);
+   bs->write_ref_array((HeapWord*) dst, length);
+   return result;
  }
  
  template <class T, bool HAS_FWD, bool EVAC, bool ENQUEUE>
  void ShenandoahBarrierSet::arraycopy_work(T* src, size_t count) {
-   assert(HAS_FWD == _heap->has_forwarded_objects(), "Forwarded object status is sane");
+   // We allow forwarding in young generation and marking in old generation
+   // to happen simultaneously.
+   assert(_heap->mode()->is_generational() || HAS_FWD == _heap->has_forwarded_objects(), "Forwarded object status is sane");
  
    Thread* thread = Thread::current();
    SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
    ShenandoahMarkingContext* ctx = _heap->marking_context();
    const ShenandoahCollectionSet* const cset = _heap->collection_set();

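Once the raw element copy has succeeded, the destination range is handed to write_ref_array so that every card spanned by the copied slots gets dirtied; the relaxed assertion in arraycopy_work likewise allows old-generation marking to coexist with young-generation forwarding. A sketch of the range-dirtying step, reusing the ToyCardTable model from above (illustrative only; the real write_ref_array may differ in detail):

#include <cstddef>
#include <cstdint>

// Dirty every card covered by the count reference slots starting at dst.
inline void post_array_copy(ToyCardTable& ct, void** dst, size_t count) {
  if (count == 0) return;
  uint8_t* first = ct.byte_for(dst);
  uint8_t* last  = ct.byte_for(dst + count - 1);   // last written slot
  for (uint8_t* p = first; p <= last; ++p) {
    *p = ToyCardTable::dirty_card;
  }
}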
@@ -359,11 +389,11 @@
          }
          assert(obj != fwd || _heap->cancelled_gc(), "must be forwarded");
          ShenandoahHeap::atomic_update_oop(fwd, elem_ptr, o);
          obj = fwd;
        }
-       if (ENQUEUE && !ctx->is_marked_strong(obj)) {
+       if (ENQUEUE && !ctx->is_marked_strong_or_old(obj)) {
          _satb_mark_queue_set.enqueue_known_active(queue, obj);
        }
      }
    }
  }

@@ -372,17 +402,31 @@
  void ShenandoahBarrierSet::arraycopy_barrier(T* src, T* dst, size_t count) {
    if (count == 0) {
      return;
    }
    int gc_state = _heap->gc_state();
-   if ((gc_state & ShenandoahHeap::MARKING) != 0) {
+   if ((gc_state & ShenandoahHeap::YOUNG_MARKING) != 0) {
      arraycopy_marking(src, dst, count);
-   } else if ((gc_state & ShenandoahHeap::EVACUATION) != 0) {
+     return;
+   }
+ 
+   if ((gc_state & ShenandoahHeap::EVACUATION) != 0) {
      arraycopy_evacuation(src, count);
    } else if ((gc_state & ShenandoahHeap::UPDATEREFS) != 0) {
      arraycopy_update(src, count);
    }
+ 
+   if (_heap->mode()->is_generational()) {
+     assert(ShenandoahSATBBarrier, "Generational mode assumes SATB mode");
+     // TODO: Could we optimize here by checking that dst is in an old region?
+     if ((gc_state & ShenandoahHeap::OLD_MARKING) != 0) {
+       // Note that we can't do the arraycopy marking using the 'src' array when
+       // SATB mode is enabled (so we can't do this as part of the iteration for
+       // evacuation or update references).
+       arraycopy_marking(src, dst, count);
+     }
+   }
  }
  
  template <class T>
  void ShenandoahBarrierSet::arraycopy_marking(T* src, T* dst, size_t count) {
    assert(_heap->is_concurrent_mark_in_progress(), "only during marking");
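The restructured dispatch reflects that gc_state is a bitmask and that, in generational mode, old-generation marking may be set at the same time as young evacuation or update-refs (see the comment added to arraycopy_work above), which is why the OLD_MARKING check runs after the evacuation/update-refs branch instead of inside a single else-if chain. A compact standalone illustration, with assumed flag values in place of the real ShenandoahHeap constants:

#include <cstdio>

enum : int {                 // illustrative values, not the HotSpot constants
  YOUNG_MARKING = 1 << 0,
  EVACUATION    = 1 << 1,
  UPDATEREFS    = 1 << 2,
  OLD_MARKING   = 1 << 3
};

int main() {
  int gc_state = EVACUATION | OLD_MARKING;   // young evac + concurrent old mark
  if (gc_state & YOUNG_MARKING) {
    std::printf("arraycopy_marking (young)\n");
  } else {
    if (gc_state & EVACUATION)      std::printf("arraycopy_evacuation\n");
    else if (gc_state & UPDATEREFS) std::printf("arraycopy_update\n");
    if (gc_state & OLD_MARKING)     std::printf("arraycopy_marking (old SATB)\n");
  }
  return 0;
}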