41 #include "utilities/powerOfTwo.hpp"
42
43 template <StringDedupMode STRING_DEDUP>
44 void ShenandoahMark::dedup_string(oop obj, StringDedup::Requests* const req) {
45 if (STRING_DEDUP == ENQUEUE_DEDUP) {
46 if (ShenandoahStringDedup::is_candidate(obj)) {
47 req->add(obj);
48 }
49 } else if (STRING_DEDUP == ALWAYS_DEDUP) {
50 if (ShenandoahStringDedup::is_string_candidate(obj) &&
51 !ShenandoahStringDedup::dedup_requested(obj)) {
52 req->add(obj);
53 }
54 }
55 }
56
// Process a single marking task: iterate the object's oops, start or continue
// chunked object-array scanning, and account liveness.
// NOTE(review): a few lines of this function are elided in this view (the text
// jumps over the close of the Case 3 branch), so the brace structure below is
// incomplete as shown. Code kept byte-identical; confirm against the full file.
template <class T, StringDedupMode STRING_DEDUP>
void ShenandoahMark::do_task(ShenandoahObjToScanQueue* q, T* cl, ShenandoahLiveData* live_data, StringDedup::Requests* const req, ShenandoahMarkTask* task) {
  oop obj = task->obj();

  shenandoah_assert_not_forwarded(NULL, obj);
  shenandoah_assert_marked(NULL, obj);
  shenandoah_assert_not_in_cset_except(NULL, obj, ShenandoahHeap::heap()->cancelled_gc());

  // Are we in weak subgraph scan?
  bool weak = task->is_weak();
  cl->set_weak(weak);

  if (task->is_not_chunked()) {
    if (obj->is_instance()) {
      // Case 1: Normal oop, process as usual.
      obj->oop_iterate(cl);
      dedup_string<STRING_DEDUP>(obj, req);
    } else if (obj->is_objArray()) {
      // Case 2: Object array instance and no chunk is set. Must be the first
      // time we visit it, start the chunked processing.
      do_chunked_array_start<T>(q, cl, obj, weak);
    } else {
      // Case 3: Primitive array. Do nothing, no oops there. We use the same
      // performance tweak TypeArrayKlass::oop_oop_iterate_impl is using:

    // Count liveness the last: push the outstanding work to the queues first
    // Avoid double-counting objects that are visited twice due to upgrade
    // from final- to strong mark.
    if (task->count_liveness()) {
      count_liveness(live_data, obj);
    }
  } else {
    // Case 4: Array chunk, has sensible chunk id. Process it.
    do_chunked_array<T>(q, cl, obj, task->chunk(), task->pow(), weak);
  }
}
96
97 inline void ShenandoahMark::count_liveness(ShenandoahLiveData* live_data, oop obj) {
98 ShenandoahHeap* const heap = ShenandoahHeap::heap();
99 size_t region_idx = heap->heap_region_index_containing(obj);
100 ShenandoahHeapRegion* region = heap->get_region(region_idx);
101 size_t size = obj->size();
102
103 if (!region->is_humongous_start()) {
104 assert(!region->is_humongous(), "Cannot have continuations here");
105 ShenandoahLiveData cur = live_data[region_idx];
106 size_t new_val = size + cur;
107 if (new_val >= SHENANDOAH_LIVEDATA_MAX) {
108 // overflow, flush to region data
109 region->increase_live_data_gc_words(new_val);
110 live_data[region_idx] = 0;
111 } else {
112 // still good, remember in locals
113 live_data[region_idx] = (ShenandoahLiveData) new_val;
114 }
115 } else {
116 shenandoah_assert_in_correct_region(NULL, obj);
117 size_t num_regions = ShenandoahHeapRegion::required_regions(size * HeapWordSize);
118
119 for (size_t i = region_idx; i < region_idx + num_regions; i++) {
120 ShenandoahHeapRegion* chain_reg = heap->get_region(i);
121 assert(chain_reg->is_humongous(), "Expecting a humongous region");
122 chain_reg->increase_live_data_gc_words(chain_reg->used() >> LogHeapWordSize);
123 }
124 }
125 }
126
// Begin chunked processing of an object array: small arrays are scanned
// directly, larger ones are split into chunks pushed back onto the queue.
// NOTE(review): this view elides the lines between the "} else {" below and
// "pow--;" — the visible text jumps from the head of do_chunked_array_start
// into what appears to be the tail of do_chunked_array (it re-declares len
// and uses chunk/pow not declared here). Kept byte-identical; do not
// restructure without the full file.
template <class T>
inline void ShenandoahMark::do_chunked_array_start(ShenandoahObjToScanQueue* q, T* cl, oop obj, bool weak) {
  assert(obj->is_objArray(), "expect object array");
  objArrayOop array = objArrayOop(obj);
  int len = array->length();

  // Mark objArray klass metadata
  if (Devirtualizer::do_metadata(cl)) {
    Devirtualizer::do_klass(cl, array->klass());
  }

  if (len <= (int) ObjArrayMarkingStride*2) {
    // A few slices only, process directly
    array->oop_iterate_range(cl, 0, len);
  } else {
    pow--;
    chunk *= 2;
    bool pushed = q->push(ShenandoahMarkTask(array, true, weak, chunk - 1, pow));
    assert(pushed, "overflow queue should always succeed pushing");
  }

  int chunk_size = 1 << pow;

  int from = (chunk - 1) * chunk_size;
  int to = chunk * chunk_size;

#ifdef ASSERT
  int len = array->length();
  assert (0 <= from && from < len, "from is sane: %d/%d", from, len);
  assert (0 < to && to <= len, "to is sane: %d/%d", to, len);
#endif

  array->oop_iterate_range(cl, from, to);
}
223
224 class ShenandoahSATBBufferClosure : public SATBBufferClosure {
225 private:
226 ShenandoahObjToScanQueue* _queue;
227 ShenandoahHeap* _heap;
228 ShenandoahMarkingContext* const _mark_context;
229 public:
230 ShenandoahSATBBufferClosure(ShenandoahObjToScanQueue* q) :
231 _queue(q),
232 _heap(ShenandoahHeap::heap()),
233 _mark_context(_heap->marking_context())
234 {
235 }
236
237 void do_buffer(void **buffer, size_t size) {
238 assert(size == 0 || !_heap->has_forwarded_objects(), "Forwarded objects are not expected here");
239 for (size_t i = 0; i < size; ++i) {
240 oop *p = (oop *) &buffer[i];
241 ShenandoahMark::mark_through_ref<oop>(p, _queue, _mark_context, false);
242 }
243 }
244 };
245
246 template<class T>
247 inline void ShenandoahMark::mark_through_ref(T* p, ShenandoahObjToScanQueue* q, ShenandoahMarkingContext* const mark_context, bool weak) {
248 T o = RawAccess<>::oop_load(p);
249 if (!CompressedOops::is_null(o)) {
250 oop obj = CompressedOops::decode_not_null(o);
251
252 shenandoah_assert_not_forwarded(p, obj);
253 shenandoah_assert_not_in_cset_except(p, obj, ShenandoahHeap::heap()->cancelled_gc());
254
255 bool skip_live = false;
256 bool marked;
257 if (weak) {
258 marked = mark_context->mark_weak(obj);
259 } else {
260 marked = mark_context->mark_strong(obj, /* was_upgraded = */ skip_live);
261 }
262 if (marked) {
263 bool pushed = q->push(ShenandoahMarkTask(obj, skip_live, weak));
264 assert(pushed, "overflow queue should always succeed pushing");
265 }
266
267 shenandoah_assert_marked(p, obj);
268 }
269 }
270
// Accessor for the marking task-queue set.
ShenandoahObjToScanQueueSet* ShenandoahMark::task_queues() const {
  return _task_queues;
}
274
// Returns the marking queue for the worker with the given index.
ShenandoahObjToScanQueue* ShenandoahMark::get_queue(uint index) const {
  return _task_queues->queue(index);
}
278 #endif // SHARE_GC_SHENANDOAH_SHENANDOAHMARK_INLINE_HPP
|
41 #include "utilities/powerOfTwo.hpp"
42
43 template <StringDedupMode STRING_DEDUP>
44 void ShenandoahMark::dedup_string(oop obj, StringDedup::Requests* const req) {
45 if (STRING_DEDUP == ENQUEUE_DEDUP) {
46 if (ShenandoahStringDedup::is_candidate(obj)) {
47 req->add(obj);
48 }
49 } else if (STRING_DEDUP == ALWAYS_DEDUP) {
50 if (ShenandoahStringDedup::is_string_candidate(obj) &&
51 !ShenandoahStringDedup::dedup_requested(obj)) {
52 req->add(obj);
53 }
54 }
55 }
56
// Process a single marking task: iterate the object's oops, start or continue
// chunked object-array scanning, and account liveness.
// NOTE(review): a few lines of this function are elided in this view (the text
// jumps over the close of the Case 3 branch), so the brace structure below is
// incomplete as shown. Code kept byte-identical; confirm against the full file.
template <class T, StringDedupMode STRING_DEDUP>
void ShenandoahMark::do_task(ShenandoahObjToScanQueue* q, T* cl, ShenandoahLiveData* live_data, StringDedup::Requests* const req, ShenandoahMarkTask* task) {
  oop obj = task->obj();

  // TODO: This will push array chunks into the mark queue with no regard for
  // generations. I don't think it will break anything, but the young generation
  // scan might end up processing some old generation array chunks.

  shenandoah_assert_not_forwarded(NULL, obj);
  shenandoah_assert_marked(NULL, obj);
  shenandoah_assert_not_in_cset_except(NULL, obj, ShenandoahHeap::heap()->cancelled_gc());

  // Are we in weak subgraph scan?
  bool weak = task->is_weak();
  cl->set_weak(weak);

  if (task->is_not_chunked()) {
    if (obj->is_instance()) {
      // Case 1: Normal oop, process as usual.
      obj->oop_iterate(cl);
      dedup_string<STRING_DEDUP>(obj, req);
    } else if (obj->is_objArray()) {
      // Case 2: Object array instance and no chunk is set. Must be the first
      // time we visit it, start the chunked processing.
      do_chunked_array_start<T>(q, cl, obj, weak);
    } else {
      // Case 3: Primitive array. Do nothing, no oops there. We use the same
      // performance tweak TypeArrayKlass::oop_oop_iterate_impl is using:

    // Count liveness the last: push the outstanding work to the queues first
    // Avoid double-counting objects that are visited twice due to upgrade
    // from final- to strong mark.
    if (task->count_liveness()) {
      count_liveness(live_data, obj);
    }
  } else {
    // Case 4: Array chunk, has sensible chunk id. Process it.
    do_chunked_array<T>(q, cl, obj, task->chunk(), task->pow(), weak);
  }
}
100
101 inline void ShenandoahMark::count_liveness(ShenandoahLiveData* live_data, oop obj) {
102 ShenandoahHeap* const heap = ShenandoahHeap::heap();
103 size_t region_idx = heap->heap_region_index_containing(obj);
104 ShenandoahHeapRegion* region = heap->get_region(region_idx);
105 size_t size = obj->size();
106
107 if (!region->is_humongous_start()) {
108 assert(!region->is_humongous(), "Cannot have continuations here");
109 assert(region->affiliation() != FREE, "Do not count live data within Free Regular Region " SIZE_FORMAT, region_idx);
110 ShenandoahLiveData cur = live_data[region_idx];
111 size_t new_val = size + cur;
112 if (new_val >= SHENANDOAH_LIVEDATA_MAX) {
113 // overflow, flush to region data
114 region->increase_live_data_gc_words(new_val);
115 live_data[region_idx] = 0;
116 } else {
117 // still good, remember in locals
118 live_data[region_idx] = (ShenandoahLiveData) new_val;
119 }
120 } else {
121 shenandoah_assert_in_correct_region(NULL, obj);
122 size_t num_regions = ShenandoahHeapRegion::required_regions(size * HeapWordSize);
123
124 assert(region->affiliation() != FREE, "Do not count live data within FREE Humongous Start Region " SIZE_FORMAT, region_idx);
125 for (size_t i = region_idx; i < region_idx + num_regions; i++) {
126 ShenandoahHeapRegion* chain_reg = heap->get_region(i);
127 assert(chain_reg->is_humongous(), "Expecting a humongous region");
128 assert(chain_reg->affiliation() != FREE, "Do not count live data within FREE Humongous Continuation Region " SIZE_FORMAT, i);
129 chain_reg->increase_live_data_gc_words(chain_reg->used() >> LogHeapWordSize);
130 }
131 }
132 }
133
// Begin chunked processing of an object array: small arrays are scanned
// directly, larger ones are split into chunks pushed back onto the queue.
// NOTE(review): this view elides the lines between the "} else {" below and
// "pow--;" — the visible text jumps from the head of do_chunked_array_start
// into what appears to be the tail of do_chunked_array (it re-declares len
// and uses chunk/pow not declared here). Kept byte-identical; do not
// restructure without the full file.
template <class T>
inline void ShenandoahMark::do_chunked_array_start(ShenandoahObjToScanQueue* q, T* cl, oop obj, bool weak) {
  assert(obj->is_objArray(), "expect object array");
  objArrayOop array = objArrayOop(obj);
  int len = array->length();

  // Mark objArray klass metadata
  if (Devirtualizer::do_metadata(cl)) {
    Devirtualizer::do_klass(cl, array->klass());
  }

  if (len <= (int) ObjArrayMarkingStride*2) {
    // A few slices only, process directly
    array->oop_iterate_range(cl, 0, len);
  } else {
    pow--;
    chunk *= 2;
    bool pushed = q->push(ShenandoahMarkTask(array, true, weak, chunk - 1, pow));
    assert(pushed, "overflow queue should always succeed pushing");
  }

  int chunk_size = 1 << pow;

  int from = (chunk - 1) * chunk_size;
  int to = chunk * chunk_size;

#ifdef ASSERT
  int len = array->length();
  assert (0 <= from && from < len, "from is sane: %d/%d", from, len);
  assert (0 < to && to <= len, "to is sane: %d/%d", to, len);
#endif

  array->oop_iterate_range(cl, from, to);
}
230
231 template <GenerationMode GENERATION>
232 class ShenandoahSATBBufferClosure : public SATBBufferClosure {
233 private:
234 ShenandoahObjToScanQueue* _queue;
235 ShenandoahObjToScanQueue* _old;
236 ShenandoahHeap* _heap;
237 ShenandoahMarkingContext* const _mark_context;
238 public:
239 ShenandoahSATBBufferClosure(ShenandoahObjToScanQueue* q, ShenandoahObjToScanQueue* old) :
240 _queue(q),
241 _old(old),
242 _heap(ShenandoahHeap::heap()),
243 _mark_context(_heap->marking_context())
244 {
245 }
246
247 void do_buffer(void **buffer, size_t size) {
248 assert(size == 0 || !_heap->has_forwarded_objects() || _heap->is_concurrent_old_mark_in_progress(), "Forwarded objects are not expected here");
249 for (size_t i = 0; i < size; ++i) {
250 oop *p = (oop *) &buffer[i];
251 ShenandoahMark::mark_through_ref<oop, GENERATION>(p, _queue, _old, _mark_context, false);
252 }
253 }
254 };
255
256 template<GenerationMode GENERATION>
257 bool ShenandoahMark::in_generation(oop obj) {
258 // Each in-line expansion of in_generation() resolves GENERATION at compile time.
259 if (GENERATION == YOUNG)
260 return ShenandoahHeap::heap()->is_in_young(obj);
261 else if (GENERATION == OLD)
262 return ShenandoahHeap::heap()->is_in_old(obj);
263 else if (GENERATION == GLOBAL)
264 return true;
265 else
266 return false;
267 }
268
// Generational variant: load the reference at p and, if non-null, mark the
// referent. Objects inside the marked generation go to this worker's queue;
// otherwise they may be routed to the old queue, and in generational mode
// interesting cross-generation pointers dirty remembered-set cards.
template<class T, GenerationMode GENERATION>
inline void ShenandoahMark::mark_through_ref(T *p, ShenandoahObjToScanQueue* q, ShenandoahObjToScanQueue* old, ShenandoahMarkingContext* const mark_context, bool weak) {
  T o = RawAccess<>::oop_load(p);
  if (!CompressedOops::is_null(o)) {
    oop obj = CompressedOops::decode_not_null(o);

    ShenandoahHeap* heap = ShenandoahHeap::heap();
    shenandoah_assert_not_forwarded(p, obj);
    shenandoah_assert_not_in_cset_except(p, obj, heap->cancelled_gc());
    if (in_generation<GENERATION>(obj)) {
      // Referent is in the generation being marked: mark and push here.
      mark_ref(q, mark_context, weak, obj);
      shenandoah_assert_marked(p, obj);
      if (heap->mode()->is_generational()) {
        // TODO: As implemented herein, GLOBAL collections reconstruct the card table during GLOBAL concurrent
        // marking. Note that the card table is cleaned at init_mark time so it needs to be reconstructed to support
        // future young-gen collections. It might be better to reconstruct card table in
        // ShenandoahHeapRegion::global_oop_iterate_and_fill_dead. We could either mark all live memory as dirty, or could
        // use the GLOBAL update-refs scanning of pointers to determine precisely which cards to flag as dirty.
        if (GENERATION == YOUNG && heap->is_in_old(p)) {
          // Mark card as dirty because remembered set scanning still finds interesting pointer.
          heap->mark_card_as_dirty((HeapWord*)p);
        } else if (GENERATION == GLOBAL && heap->is_in_old(p) && heap->is_in_young(obj)) {
          // Mark card as dirty because GLOBAL marking finds interesting pointer.
          heap->mark_card_as_dirty((HeapWord*)p);
        }
      }
    } else if (old != nullptr) {
      // Young mark, bootstrapping old or concurrent with old marking.
      mark_ref(old, mark_context, weak, obj);
      shenandoah_assert_marked(p, obj);
    } else if (GENERATION == OLD) {
      // Old mark, found a young pointer.
      // TODO: Rethink this: may be redundant with dirtying of cards identified during young-gen remembered set scanning
      // and by mutator write barriers. Assert
      if (heap->is_in(p)) {
        assert(heap->is_in_young(obj), "Expected young object.");
        heap->mark_card_as_dirty(p);
      }
    }
  }
}
310
311 inline void ShenandoahMark::mark_ref(ShenandoahObjToScanQueue* q,
312 ShenandoahMarkingContext* const mark_context,
313 bool weak, oop obj) {
314 bool skip_live = false;
315 bool marked;
316 if (weak) {
317 marked = mark_context->mark_weak(obj);
318 } else {
319 marked = mark_context->mark_strong(obj, /* was_upgraded = */ skip_live);
320 }
321 if (marked) {
322 bool pushed = q->push(ShenandoahMarkTask(obj, skip_live, weak));
323 assert(pushed, "overflow queue should always succeed pushing");
324 }
325 }
326
// Accessor for the marking task-queue set.
ShenandoahObjToScanQueueSet* ShenandoahMark::task_queues() const {
  return _task_queues;
}
330
// Returns the marking queue for the worker with the given index.
ShenandoahObjToScanQueue* ShenandoahMark::get_queue(uint index) const {
  return _task_queues->queue(index);
}
334
335 ShenandoahObjToScanQueue* ShenandoahMark::get_old_queue(uint index) const {
336 if (_old_gen_task_queues != nullptr) {
337 return _old_gen_task_queues->queue(index);
338 }
339 return nullptr;
340 }
341
342 #endif // SHARE_GC_SHENANDOAH_SHENANDOAHMARK_INLINE_HPP
|