src/hotspot/share/gc/shenandoah/shenandoahConcurrentMark.inline.hpp

 236   T o = RawAccess<>::oop_load(p);
 237   if (!CompressedOops::is_null(o)) {
 238     oop obj = CompressedOops::decode_not_null(o);
 239     switch (UPDATE_REFS) {
 240     case NONE:
 241       break;
 242     case RESOLVE:
 243       obj = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
 244       break;
 245     case SIMPLE:
 246       // We piggy-back reference updating to the marking tasks.
 247       obj = heap->update_with_forwarded_not_null(p, obj);
 248       break;
 249     case CONCURRENT:
 250       obj = heap->maybe_update_with_forwarded_not_null(p, obj);
 251       break;
 252     default:
 253       ShouldNotReachHere();
 254     }
 255 
 256     // Note: Only when concurrently updating references can obj be different
 257     // (that is, really different, not just different from-/to-space copies of the same)
 258     // from the one we originally loaded. Mutator thread can beat us by writing something
 259     // else into the location. In that case, we would mark through that updated value,
 260     // on the off-chance it is not handled by other means (e.g. via SATB). However,
 261     // if that write was NULL, we don't need to do anything else.
 262     if (UPDATE_REFS != CONCURRENT || !CompressedOops::is_null(obj)) {
 263       shenandoah_assert_not_forwarded(p, obj);
 264       shenandoah_assert_not_in_cset_except(p, obj, heap->cancelled_gc());
 265 
 266       if (mark_context->mark(obj)) {
 267         bool pushed = q->push(ShenandoahMarkTask(obj));
 268         assert(pushed, "overflow queue should always succeed pushing");
 269 
 270         if ((STRING_DEDUP == ENQUEUE_DEDUP) && ShenandoahStringDedup::is_candidate(obj)) {
 271           assert(ShenandoahStringDedup::is_enabled(), "Must be enabled");
 272           ShenandoahStringDedup::enqueue_candidate(obj);
 273         }
 274       }
 275 
 276       shenandoah_assert_marked(p, obj);
 277     }
 278   }
 279 }
 280 
 281 #endif // SHARE_GC_SHENANDOAH_SHENANDOAHCONCURRENTMARK_INLINE_HPP


 236   T o = RawAccess<>::oop_load(p);
 237   if (!CompressedOops::is_null(o)) {
 238     oop obj = CompressedOops::decode_not_null(o);
 239     switch (UPDATE_REFS) {
 240     case NONE:
 241       break;
 242     case RESOLVE:
 243       obj = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
 244       break;
 245     case SIMPLE:
 246       // We piggy-back reference updating to the marking tasks.
 247       obj = heap->update_with_forwarded_not_null(p, obj);
 248       break;
 249     case CONCURRENT:
 250       obj = heap->maybe_update_with_forwarded_not_null(p, obj);
 251       break;
 252     default:
 253       ShouldNotReachHere();
 254     }
 255 
 256     // Note: Only when concurrently updating references can obj become NULL here.
 257     // It happens when a mutator thread beats us by writing another value. In that
 258     // case we don't need to do anything else.



 259     if (UPDATE_REFS != CONCURRENT || !CompressedOops::is_null(obj)) {
 260       shenandoah_assert_not_forwarded(p, obj);
 261       shenandoah_assert_not_in_cset_except(p, obj, heap->cancelled_gc());
 262 
 263       if (mark_context->mark(obj)) {
 264         bool pushed = q->push(ShenandoahMarkTask(obj));
 265         assert(pushed, "overflow queue should always succeed pushing");
 266 
 267         if ((STRING_DEDUP == ENQUEUE_DEDUP) && ShenandoahStringDedup::is_candidate(obj)) {
 268           assert(ShenandoahStringDedup::is_enabled(), "Must be enabled");
 269           ShenandoahStringDedup::enqueue_candidate(obj);
 270         }
 271       }
 272 
 273       shenandoah_assert_marked(p, obj);
 274     }
 275   }
 276 }
 277 
 278 #endif // SHARE_GC_SHENANDOAH_SHENANDOAHCONCURRENTMARK_INLINE_HPP
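
For readers skimming this hunk out of context: UPDATE_REFS and STRING_DEDUP are compile-time constants (template parameters of the enclosing function, which lies outside this hunk), so the switch above is resolved per instantiation and the untaken branches are compiled away, keeping the hot marking loop free of runtime mode checks. A minimal standalone sketch of that dispatch pattern, using hypothetical names rather than the actual HotSpot declarations:

#include <cstdio>

// Hypothetical stand-ins for the update-refs / string-dedup modes that the
// template parameters select in the hunk above; the real enum names and the
// enclosing function signature are not shown in this hunk.
enum UpdateRefsMode  { NONE, RESOLVE, SIMPLE, CONCURRENT };
enum StringDedupMode { NO_DEDUP, ENQUEUE_DEDUP };

template <UpdateRefsMode UPDATE_REFS, StringDedupMode STRING_DEDUP>
void mark_through_ref_sketch(void* p) {
  switch (UPDATE_REFS) {            // constant in each instantiation
  case NONE:       break;           // leave the loaded reference untouched
  case RESOLVE:    break;           // follow forwarding only (per the hunk above)
  case SIMPLE:     break;           // update the slot as part of marking (per the hunk above)
  case CONCURRENT: break;           // update the slot while mutators may race (per the hunk above)
  }
  if (STRING_DEDUP == ENQUEUE_DEDUP) {
    // dedup enqueue is only compiled into dedup-enabled instantiations
  }
  std::printf("visited slot %p\n", p);
}

int main() {
  int slot = 0;
  mark_through_ref_sketch<NONE, NO_DEDUP>(&slot);
  mark_through_ref_sketch<CONCURRENT, ENQUEUE_DEDUP>(&slot);
  return 0;
}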