
src/hotspot/share/gc/shenandoah/shenandoahMark.inline.hpp

 43 #include "utilities/powerOfTwo.hpp"
 44 
 45 template <StringDedupMode STRING_DEDUP>
 46 void ShenandoahMark::dedup_string(oop obj, StringDedup::Requests* const req) {
 47   if (STRING_DEDUP == ENQUEUE_DEDUP) {
 48     if (ShenandoahStringDedup::is_candidate(obj)) {
 49       req->add(obj);
 50     }
 51   } else if (STRING_DEDUP == ALWAYS_DEDUP) {
 52     if (ShenandoahStringDedup::is_string_candidate(obj) &&
 53         !ShenandoahStringDedup::dedup_requested(obj)) {
 54         req->add(obj);
 55     }
 56   }
 57 }
 58 
 59 template <class T, StringDedupMode STRING_DEDUP>
 60 void ShenandoahMark::do_task(ShenandoahObjToScanQueue* q, T* cl, ShenandoahLiveData* live_data, StringDedup::Requests* const req, ShenandoahMarkTask* task) {
 61   oop obj = task->obj();
 62 
 63   shenandoah_assert_not_forwarded(nullptr, obj);
 64   shenandoah_assert_marked(nullptr, obj);
 65   shenandoah_assert_not_in_cset_except(nullptr, obj, ShenandoahHeap::heap()->cancelled_gc());
 66 
 67   // Are we in weak subgraph scan?
 68   bool weak = task->is_weak();
 69   cl->set_weak(weak);
 70 
 71   if (task->is_not_chunked()) {
 72     if (obj->is_instance()) {
 73       // Case 1: Normal oop, process as usual.
 74       if (ContinuationGCSupport::relativize_stack_chunk(obj)) {
 75           // Loom doesn't support mixing of weak marking and strong marking of
 76           // stack chunks.
 77           cl->set_weak(false);
 78       }
 79 
 80       obj->oop_iterate(cl);
 81       dedup_string<STRING_DEDUP>(obj, req);
 82     } else if (obj->is_objArray()) {

 93     // Count liveness the last: push the outstanding work to the queues first
 94     // Avoid double-counting objects that are visited twice due to upgrade
 95     // from final- to strong mark.
 96     if (task->count_liveness()) {
 97       count_liveness(live_data, obj);
 98     }
 99   } else {
100     // Case 4: Array chunk, has sensible chunk id. Process it.
101     do_chunked_array<T>(q, cl, obj, task->chunk(), task->pow(), weak);
102   }
103 }
104 
105 inline void ShenandoahMark::count_liveness(ShenandoahLiveData* live_data, oop obj) {
106   ShenandoahHeap* const heap = ShenandoahHeap::heap();
107   size_t region_idx = heap->heap_region_index_containing(obj);
108   ShenandoahHeapRegion* region = heap->get_region(region_idx);
109   size_t size = obj->size();
110 
111   if (!region->is_humongous_start()) {
112     assert(!region->is_humongous(), "Cannot have continuations here");
113     ShenandoahLiveData cur = live_data[region_idx];
114     size_t new_val = size + cur;
115     if (new_val >= SHENANDOAH_LIVEDATA_MAX) {
116       // overflow, flush to region data
117       region->increase_live_data_gc_words(new_val);
118       live_data[region_idx] = 0;
119     } else {
120       // still good, remember in locals
121       live_data[region_idx] = (ShenandoahLiveData) new_val;
122     }
123   } else {
124     shenandoah_assert_in_correct_region(nullptr, obj);
125     size_t num_regions = ShenandoahHeapRegion::required_regions(size * HeapWordSize);
126 
127     for (size_t i = region_idx; i < region_idx + num_regions; i++) {
128       ShenandoahHeapRegion* chain_reg = heap->get_region(i);
129       assert(chain_reg->is_humongous(), "Expecting a humongous region");
130       chain_reg->increase_live_data_gc_words(chain_reg->used() >> LogHeapWordSize);
131     }
132   }
133 }
134 
135 template <class T>
136 inline void ShenandoahMark::do_chunked_array_start(ShenandoahObjToScanQueue* q, T* cl, oop obj, bool weak) {
137   assert(obj->is_objArray(), "expect object array");
138   objArrayOop array = objArrayOop(obj);
139   int len = array->length();
140 
141   // Mark objArray klass metadata
142   if (Devirtualizer::do_metadata(cl)) {
143     Devirtualizer::do_klass(cl, array->klass());
144   }
145 
146   if (len <= (int) ObjArrayMarkingStride*2) {
147     // A few slices only, process directly
148     array->oop_iterate_range(cl, 0, len);
149   } else {

212     pow--;
213     chunk *= 2;
214     bool pushed = q->push(ShenandoahMarkTask(array, true, weak, chunk - 1, pow));
215     assert(pushed, "overflow queue should always succeed pushing");
216   }
217 
218   int chunk_size = 1 << pow;
219 
220   int from = (chunk - 1) * chunk_size;
221   int to = chunk * chunk_size;
222 
223 #ifdef ASSERT
224   int len = array->length();
225   assert (0 <= from && from < len, "from is sane: %d/%d", from, len);
226   assert (0 < to && to <= len, "to is sane: %d/%d", to, len);
227 #endif
228 
229   array->oop_iterate_range(cl, from, to);
230 }
231 
232 class ShenandoahSATBBufferClosure : public SATBBufferClosure {
233 private:
234   ShenandoahObjToScanQueue* _queue;
235   ShenandoahHeap* _heap;
236   ShenandoahMarkingContext* const _mark_context;
237 public:
238   ShenandoahSATBBufferClosure(ShenandoahObjToScanQueue* q) :
239     _queue(q),
240     _heap(ShenandoahHeap::heap()),
241     _mark_context(_heap->marking_context())
242   {
243   }
244 
245   void do_buffer(void **buffer, size_t size) {
246     assert(size == 0 || !_heap->has_forwarded_objects(), "Forwarded objects are not expected here");
247     for (size_t i = 0; i < size; ++i) {
248       oop *p = (oop *) &buffer[i];
249       ShenandoahMark::mark_through_ref<oop>(p, _queue, _mark_context, false);
250     }
251   }
252 };
253 
254 template<class T>
255 inline void ShenandoahMark::mark_through_ref(T* p, ShenandoahObjToScanQueue* q, ShenandoahMarkingContext* const mark_context, bool weak) {
256   T o = RawAccess<>::oop_load(p);
257   if (!CompressedOops::is_null(o)) {
258     oop obj = CompressedOops::decode_not_null(o);
259 
260     shenandoah_assert_not_forwarded(p, obj);
261     shenandoah_assert_not_in_cset_except(p, obj, ShenandoahHeap::heap()->cancelled_gc());
262 
263     bool skip_live = false;
264     bool marked;
265     if (weak) {
266       marked = mark_context->mark_weak(obj);
267     } else {
268       marked = mark_context->mark_strong(obj, /* was_upgraded = */ skip_live);
269     }
270     if (marked) {
271       bool pushed = q->push(ShenandoahMarkTask(obj, skip_live, weak));
272       assert(pushed, "overflow queue should always succeed pushing");
273     }
274 
275     shenandoah_assert_marked(p, obj);
276   }
277 }
278 
279 ShenandoahObjToScanQueueSet* ShenandoahMark::task_queues() const {
280   return _task_queues;
281 }
282 
283 ShenandoahObjToScanQueue* ShenandoahMark::get_queue(uint index) const {
284   return _task_queues->queue(index);
285 }
286 #endif // SHARE_GC_SHENANDOAH_SHENANDOAHMARK_INLINE_HPP
(end of the original version of the file; the patched version follows)

 43 #include "utilities/powerOfTwo.hpp"
 44 
 45 template <StringDedupMode STRING_DEDUP>
 46 void ShenandoahMark::dedup_string(oop obj, StringDedup::Requests* const req) {
 47   if (STRING_DEDUP == ENQUEUE_DEDUP) {
 48     if (ShenandoahStringDedup::is_candidate(obj)) {
 49       req->add(obj);
 50     }
 51   } else if (STRING_DEDUP == ALWAYS_DEDUP) {
 52     if (ShenandoahStringDedup::is_string_candidate(obj) &&
 53         !ShenandoahStringDedup::dedup_requested(obj)) {
 54         req->add(obj);
 55     }
 56   }
 57 }
 58 
 59 template <class T, StringDedupMode STRING_DEDUP>
 60 void ShenandoahMark::do_task(ShenandoahObjToScanQueue* q, T* cl, ShenandoahLiveData* live_data, StringDedup::Requests* const req, ShenandoahMarkTask* task) {
 61   oop obj = task->obj();
 62 
 63   // TODO: This will push array chunks into the mark queue with no regard for
 64   // generations. I don't think it will break anything, but the young generation
 65   // scan might end up processing some old generation array chunks.
 66 
 67   shenandoah_assert_not_forwarded(nullptr, obj);
 68   shenandoah_assert_marked(nullptr, obj);
 69   shenandoah_assert_not_in_cset_except(nullptr, obj, ShenandoahHeap::heap()->cancelled_gc());
 70 
 71   // Are we in weak subgraph scan?
 72   bool weak = task->is_weak();
 73   cl->set_weak(weak);
 74 
 75   if (task->is_not_chunked()) {
 76     if (obj->is_instance()) {
 77       // Case 1: Normal oop, process as usual.
 78       if (ContinuationGCSupport::relativize_stack_chunk(obj)) {
 79           // Loom doesn't support mixing of weak marking and strong marking of
 80           // stack chunks.
 81           cl->set_weak(false);
 82       }
 83 
 84       obj->oop_iterate(cl);
 85       dedup_string<STRING_DEDUP>(obj, req);
 86     } else if (obj->is_objArray()) {

 97     // Count liveness the last: push the outstanding work to the queues first
 98     // Avoid double-counting objects that are visited twice due to upgrade
 99     // from final- to strong mark.
100     if (task->count_liveness()) {
101       count_liveness(live_data, obj);
102     }
103   } else {
104     // Case 4: Array chunk, has sensible chunk id. Process it.
105     do_chunked_array<T>(q, cl, obj, task->chunk(), task->pow(), weak);
106   }
107 }
108 
109 inline void ShenandoahMark::count_liveness(ShenandoahLiveData* live_data, oop obj) {
110   ShenandoahHeap* const heap = ShenandoahHeap::heap();
111   size_t region_idx = heap->heap_region_index_containing(obj);
112   ShenandoahHeapRegion* region = heap->get_region(region_idx);
113   size_t size = obj->size();
114 
115   if (!region->is_humongous_start()) {
116     assert(!region->is_humongous(), "Cannot have continuations here");
117     assert(region->affiliation() != FREE, "Do not count live data within Free Regular Region " SIZE_FORMAT, region_idx);
118     ShenandoahLiveData cur = live_data[region_idx];
119     size_t new_val = size + cur;
120     if (new_val >= SHENANDOAH_LIVEDATA_MAX) {
121       // overflow, flush to region data
122       region->increase_live_data_gc_words(new_val);
123       live_data[region_idx] = 0;
124     } else {
125       // still good, remember in locals
126       live_data[region_idx] = (ShenandoahLiveData) new_val;
127     }
128   } else {
129     shenandoah_assert_in_correct_region(nullptr, obj);
130     size_t num_regions = ShenandoahHeapRegion::required_regions(size * HeapWordSize);
131 
132     assert(region->affiliation() != FREE, "Do not count live data within FREE Humongous Start Region " SIZE_FORMAT, region_idx);
133     for (size_t i = region_idx; i < region_idx + num_regions; i++) {
134       ShenandoahHeapRegion* chain_reg = heap->get_region(i);
135       assert(chain_reg->is_humongous(), "Expecting a humongous region");
136       assert(chain_reg->affiliation() != FREE, "Do not count live data within FREE Humongous Continuation Region " SIZE_FORMAT, i);
137       chain_reg->increase_live_data_gc_words(chain_reg->used() >> LogHeapWordSize);
138     }
139   }
140 }
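
For context: count_liveness() buffers per-region live words in a narrow per-worker counter and only flushes to the region's shared counter when the local value would reach SHENANDOAH_LIVEDATA_MAX. A standalone sketch of that flush-on-overflow pattern follows; the 16-bit width, names and types here are illustrative assumptions, not the real ShenandoahLiveData definitions:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Illustrative stand-ins; the real code uses ShenandoahLiveData and
    // SHENANDOAH_LIVEDATA_MAX from the Shenandoah headers.
    typedef uint16_t LocalLive;
    const size_t LOCAL_LIVE_MAX = UINT16_MAX;

    struct Region { std::atomic<size_t> live_words{0}; };

    // Accumulate 'words' of live data: stay in the cheap per-worker counter
    // while it fits, flush to the shared region counter on overflow.
    inline void add_live(LocalLive& local, Region& region, size_t words) {
      size_t new_val = words + local;
      if (new_val >= LOCAL_LIVE_MAX) {
        region.live_words.fetch_add(new_val, std::memory_order_relaxed);  // flush
        local = 0;
      } else {
        local = (LocalLive) new_val;                                      // keep buffering locally
      }
    }

    int main() {
      LocalLive local = 0;
      Region r;
      add_live(local, r, 70000);  // does not fit in 16 bits: flushed to the region
      add_live(local, r, 100);    // small: stays in the local counter
      return (r.live_words == 70000 && local == 100) ? 0 : 1;
    }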
141 
142 template <class T>
143 inline void ShenandoahMark::do_chunked_array_start(ShenandoahObjToScanQueue* q, T* cl, oop obj, bool weak) {
144   assert(obj->is_objArray(), "expect object array");
145   objArrayOop array = objArrayOop(obj);
146   int len = array->length();
147 
148   // Mark objArray klass metadata
149   if (Devirtualizer::do_metadata(cl)) {
150     Devirtualizer::do_klass(cl, array->klass());
151   }
152 
153   if (len <= (int) ObjArrayMarkingStride*2) {
154     // A few slices only, process directly
155     array->oop_iterate_range(cl, 0, len);
156   } else {

219     pow--;
220     chunk *= 2;
221     bool pushed = q->push(ShenandoahMarkTask(array, true, weak, chunk - 1, pow));
222     assert(pushed, "overflow queue should always succeed pushing");
223   }
224 
225   int chunk_size = 1 << pow;
226 
227   int from = (chunk - 1) * chunk_size;
228   int to = chunk * chunk_size;
229 
230 #ifdef ASSERT
231   int len = array->length();
232   assert (0 <= from && from < len, "from is sane: %d/%d", from, len);
233   assert (0 < to && to <= len, "to is sane: %d/%d", to, len);
234 #endif
235 
236   array->oop_iterate_range(cl, from, to);
237 }
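
The index arithmetic at the end of do_chunked_array() maps a (chunk, pow) task to the half-open element range [(chunk - 1) << pow, chunk << pow). A small self-contained check of just that math (the helper name is made up for illustration):

    #include <cassert>

    // Returns the half-open range [from, to) covered by a chunk task, where
    // 'pow' is log2 of the chunk size and 'chunk' is the 1-based chunk id.
    inline void chunk_range(int chunk, int pow, int& from, int& to) {
      int chunk_size = 1 << pow;
      from = (chunk - 1) * chunk_size;
      to   = chunk * chunk_size;
    }

    int main() {
      int from, to;
      chunk_range(3, 10, from, to);        // chunk id 3, chunk size 1 << 10 = 1024
      assert(from == 2048 && to == 3072);  // elements [2048, 3072) of the array
      return 0;
    }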
238 
239 template <GenerationMode GENERATION>
240 class ShenandoahSATBBufferClosure : public SATBBufferClosure {
241 private:
242   ShenandoahObjToScanQueue* _queue;
243   ShenandoahObjToScanQueue* _old;
244   ShenandoahHeap* _heap;
245   ShenandoahMarkingContext* const _mark_context;
246 public:
247   ShenandoahSATBBufferClosure(ShenandoahObjToScanQueue* q, ShenandoahObjToScanQueue* old) :
248     _queue(q),
249     _old(old),
250     _heap(ShenandoahHeap::heap()),
251     _mark_context(_heap->marking_context())
252   {
253   }
254 
255   void do_buffer(void **buffer, size_t size) {
256     assert(size == 0 || !_heap->has_forwarded_objects() || _heap->is_concurrent_old_mark_in_progress(), "Forwarded objects are not expected here");
257     for (size_t i = 0; i < size; ++i) {
258       oop *p = (oop *) &buffer[i];
259       ShenandoahMark::mark_through_ref<oop, GENERATION>(p, _queue, _old, _mark_context, false);
260     }
261   }
262 };
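
For orientation, do_buffer() simply walks an array of recorded slots and marks each one through mark_through_ref(); routing to the old-generation queue happens inside that call. A hypothetical drain helper showing the contract (in the patch the buffers come from the SATB mark queue set, not from a caller-supplied array):

    // Hypothetical helper, assuming YOUNG marking; 'buffer'/'entries' describe
    // one completed SATB buffer.
    static void drain_one_satb_buffer(ShenandoahObjToScanQueue* q,
                                      ShenandoahObjToScanQueue* old_q,
                                      void** buffer, size_t entries) {
      ShenandoahSATBBufferClosure<YOUNG> cl(q, old_q);
      cl.do_buffer(buffer, entries);  // marks every recorded reference
    }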
263 
264 template<GenerationMode GENERATION>
265 bool ShenandoahMark::in_generation(oop obj) {
266   // Each in-line expansion of in_generation() resolves GENERATION at compile time.
267   if (GENERATION == YOUNG)
268     return ShenandoahHeap::heap()->is_in_young(obj);
269   else if (GENERATION == OLD)
270     return ShenandoahHeap::heap()->is_in_old(obj);
271   else if (GENERATION == GLOBAL)
272     return true;
273   else
274     return false;
275 }
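
Since GENERATION is a non-type template parameter, each instantiation of in_generation() folds to a single membership test (or a constant for GLOBAL). A standalone analogy of that compile-time folding, not Shenandoah code:

    #include <cassert>

    enum Gen { YOUNG_G, OLD_G, GLOBAL_G };  // stand-in enumerators, not GenerationMode

    template <Gen G>
    bool in_gen(bool in_young, bool in_old) {
      // Same branch shape as above; dead branches vanish per instantiation.
      if (G == YOUNG_G)       return in_young;
      else if (G == OLD_G)    return in_old;
      else if (G == GLOBAL_G) return true;
      else                    return false;
    }

    int main() {
      assert(in_gen<GLOBAL_G>(false, false));  // GLOBAL accepts everything
      assert(in_gen<YOUNG_G>(true, false));
      assert(!in_gen<OLD_G>(true, false));
      return 0;
    }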
276 
277 template<class T, GenerationMode GENERATION>
278 inline void ShenandoahMark::mark_through_ref(T *p, ShenandoahObjToScanQueue* q, ShenandoahObjToScanQueue* old, ShenandoahMarkingContext* const mark_context, bool weak) {
279   T o = RawAccess<>::oop_load(p);
280   if (!CompressedOops::is_null(o)) {
281     oop obj = CompressedOops::decode_not_null(o);
282 
283     ShenandoahHeap* heap = ShenandoahHeap::heap();
284     shenandoah_assert_not_forwarded(p, obj);
285     shenandoah_assert_not_in_cset_except(p, obj, heap->cancelled_gc());
286     if (in_generation<GENERATION>(obj)) {
287       mark_ref(q, mark_context, weak, obj);
288       shenandoah_assert_marked(p, obj);
289       if (heap->mode()->is_generational()) {
290         // TODO: As implemented herein, GLOBAL collections reconstruct the card table during GLOBAL concurrent
291         // marking. Note that the card table is cleaned at init_mark time so it needs to be reconstructed to support
292         // future young-gen collections.  It might be better to reconstruct card table in
293         // ShenandoahHeapRegion::global_oop_iterate_and_fill_dead.  We could either mark all live memory as dirty, or could
294         // use the GLOBAL update-refs scanning of pointers to determine precisely which cards to flag as dirty.
295         if (GENERATION == YOUNG && heap->is_in_old(p)) {
296           // Mark card as dirty because remembered set scanning still finds interesting pointer.
297           heap->mark_card_as_dirty((HeapWord*)p);
298         } else if (GENERATION == GLOBAL && heap->is_in_old(p) && heap->is_in_young(obj)) {
299           // Mark card as dirty because GLOBAL marking finds interesting pointer.
300           heap->mark_card_as_dirty((HeapWord*)p);
301         }
302       }
303     } else if (old != nullptr) {
304       // Young mark, bootstrapping old or concurrent with old marking.
305       mark_ref(old, mark_context, weak, obj);
306       shenandoah_assert_marked(p, obj);
307     } else if (GENERATION == OLD) {
308       // Old mark, found a young pointer.
309       // TODO:  Rethink this: may be redundant with dirtying of cards identified during young-gen remembered set scanning
310       // and by mutator write barriers.  Assert
311       if (heap->is_in(p)) {
312         assert(heap->is_in_young(obj), "Expected young object.");
313         heap->mark_card_as_dirty(p);
314       }
315     }
316   }
317 }
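
The generational branches above decide, per reference slot p, whether to dirty its card so that future remembered-set scans still see an interesting old-to-young pointer. A boolean restatement of just that decision, using made-up predicate parameters in place of the real heap queries:

    #include <cassert>

    // Mirrors the visible control flow of mark_through_ref() in generational mode.
    inline bool should_dirty_card(bool generational,
                                  bool marking_young, bool marking_global, bool marking_old,
                                  bool obj_in_marked_generation, bool have_old_queue,
                                  bool p_in_old, bool p_in_heap, bool obj_in_young) {
      if (obj_in_marked_generation) {
        if (!generational) return false;
        if (marking_young && p_in_old)                  return true;  // keep old->young visible to RS scans
        if (marking_global && p_in_old && obj_in_young) return true;  // rebuild card table during GLOBAL mark
        return false;
      }
      if (have_old_queue) return false;  // reference target is pushed to the old-gen queue instead
      return marking_old && p_in_heap;   // old marking found a pointer to a young object
    }

    int main() {
      // YOUNG marking, slot in old gen, target in young gen: card is dirtied.
      assert(should_dirty_card(true,  true, false, false, true, false, true, true, true));
      // Same reference when the heap is not generational: no card work.
      assert(!should_dirty_card(false, true, false, false, true, false, true, true, true));
      return 0;
    }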
318 
319 inline void ShenandoahMark::mark_ref(ShenandoahObjToScanQueue* q,
320                               ShenandoahMarkingContext* const mark_context,
321                               bool weak, oop obj) {
322   bool skip_live = false;
323   bool marked;
324   if (weak) {
325     marked = mark_context->mark_weak(obj);
326   } else {
327     marked = mark_context->mark_strong(obj, /* was_upgraded = */ skip_live);
328   }
329   if (marked) {
330     bool pushed = q->push(ShenandoahMarkTask(obj, skip_live, weak));
331     assert(pushed, "overflow queue should always succeed pushing");
332   }
333 }
334 
335 ShenandoahObjToScanQueueSet* ShenandoahMark::task_queues() const {
336   return _task_queues;
337 }
338 
339 ShenandoahObjToScanQueue* ShenandoahMark::get_queue(uint index) const {
340   return _task_queues->queue(index);
341 }
342 
343 ShenandoahObjToScanQueue* ShenandoahMark::get_old_queue(uint index) const {
344   if (_old_gen_task_queues != nullptr) {
345     return _old_gen_task_queues->queue(index);
346   }
347   return nullptr;
348 }
349 
350 #endif // SHARE_GC_SHENANDOAH_SHENANDOAHMARK_INLINE_HPP