228 // Coming out of Full GC, we would not have any forwarded objects.
229 // This also prevents resolves with fwdptr from kicking in while adjusting pointers in phase3.
230 heap->set_has_forwarded_objects(false);
231
232 heap->set_full_gc_move_in_progress(true);
233
234 // Setup workers for the rest
235 OrderAccess::fence();
236
237 // Initialize worker slices
238 ShenandoahHeapRegionSet** worker_slices = NEW_C_HEAP_ARRAY(ShenandoahHeapRegionSet*, heap->max_workers(), mtGC);
239 for (uint i = 0; i < heap->max_workers(); i++) {
240 worker_slices[i] = new ShenandoahHeapRegionSet();
241 }
242
243 {
244 // The rest of code performs region moves, where region status is undefined
245 // until all phases run together.
246 ShenandoahHeapLocker lock(heap->lock());
247
248 phase2_calculate_target_addresses(worker_slices);
249
250 OrderAccess::fence();
251
252 phase3_update_references();
253
254 phase4_compact_objects(worker_slices);
255
256 phase5_epilog();
257 }
258
259 // Resize metaspace
260 MetaspaceGC::compute_new_size();
261
262 // Free worker slices
263 for (uint i = 0; i < heap->max_workers(); i++) {
264 delete worker_slices[i];
265 }
266 FREE_C_HEAP_ARRAY(ShenandoahHeapRegionSet*, worker_slices);
267
268 heap->set_full_gc_move_in_progress(false);
269 heap->set_full_gc_in_progress(false);
270
271 if (ShenandoahVerify) {
272 heap->verifier()->verify_after_fullgc();
273 }
274
275 if (VerifyAfterGC) {
276 Universe::verify();
349 }
350
351 void finish() {
352 assert(_to_region != nullptr, "should not happen");
353 _to_region->set_new_top(_compact_point);
354 }
355
356 bool is_compact_same_region() {
357 return _from_region == _to_region;
358 }
359
360 int empty_regions_pos() {
361 return _empty_regions_pos;
362 }
363
  // Computes the post-compaction address for one marked object and records
  // it in the forwarding data. Objects are NOT copied here; the actual move
  // happens later in the compaction phase.
  void do_object(oop p) {
    assert(_from_region != nullptr, "must set before work");
    assert(_heap->complete_marking_context()->is_marked(p), "must be marked");
    assert(!_heap->complete_marking_context()->allocated_after_mark_start(p), "must be truly marked");

    size_t obj_size = p->size();
    if (_compact_point + obj_size > _to_region->end()) {
      // Current to-region is full: seal it before switching.
      finish();

      // Object doesn't fit. Pick next empty region and start compacting there.
      ShenandoahHeapRegion* new_to_region;
      if (_empty_regions_pos < _empty_regions.length()) {
        new_to_region = _empty_regions.at(_empty_regions_pos);
        _empty_regions_pos++;
      } else {
        // Out of empty regions? Compact within the same (source) region.
        new_to_region = _from_region;
      }

      assert(new_to_region != _to_region, "must not reuse same to-region");
      assert(new_to_region != nullptr, "must not be null");
      _to_region = new_to_region;
      _compact_point = _to_region->bottom();
    }

    // Object fits into the current region. Record the new location, unless
    // the object stays at its current address (no forwarding needed then).
    assert(_compact_point + obj_size <= _to_region->end(), "must fit");
    shenandoah_assert_not_forwarded(nullptr, p);
    if (_compact_point != cast_from_oop<HeapWord*>(p)) {
      // Installing the forwarding pointer clobbers the mark word; save it
      // first if it carries information that must survive the move.
      _preserved_marks->push_if_necessary(p, p->mark());
      FullGCForwarding::forward_to(p, cast_to_oop(_compact_point));
    }
    _compact_point += obj_size;
  }
398 };
399
400 class ShenandoahPrepareForCompactionTask : public WorkerTask {
401 private:
402 PreservedMarksSet* const _preserved_marks;
403 ShenandoahHeap* const _heap;
404 ShenandoahHeapRegionSet** const _worker_slices;
405
406 public:
886 private:
887 ShenandoahHeap* const _heap;
888 uint const _worker_id;
889
890 public:
  // worker_id identifies the GC worker thread driving this closure.
  ShenandoahCompactObjectsClosure(uint worker_id) :
    _heap(ShenandoahHeap::heap()), _worker_id(worker_id) {}
893
  // Physically moves one object to the address computed during the
  // calculate-target-addresses phase. Objects without a forwarding entry
  // stay in place and need no work.
  void do_object(oop p) {
    assert(_heap->complete_marking_context()->is_marked(p), "must be marked");
    size_t size = p->size();
    if (FullGCForwarding::is_forwarded(p)) {
      HeapWord* compact_from = cast_from_oop<HeapWord*>(p);
      HeapWord* compact_to = cast_from_oop<HeapWord*>(FullGCForwarding::forwardee(p));
      assert(compact_from != compact_to, "Forwarded object should move");
      Copy::aligned_conjoint_words(compact_from, compact_to, size);
      oop new_obj = cast_to_oop(compact_to);

      // Fix up internal stack-chunk pointers (Loom) for the new address,
      // then reset the mark word, which still holds the forwarding pointer.
      ContinuationGCSupport::relativize_stack_chunk(new_obj);
      new_obj->init_mark();
    }
  }
908 };
909
910 class ShenandoahCompactObjectsTask : public WorkerTask {
911 private:
912 ShenandoahHeap* const _heap;
913 ShenandoahHeapRegionSet** const _worker_slices;
914
915 public:
  // worker_slices holds the per-worker region sets computed earlier;
  // each worker compacts only the regions in its own slice.
  ShenandoahCompactObjectsTask(ShenandoahHeapRegionSet** worker_slices) :
    WorkerTask("Shenandoah Compact Objects"),
    _heap(ShenandoahHeap::heap()),
    _worker_slices(worker_slices) {
  }
921
922 void work(uint worker_id) {
923 ShenandoahParallelWorkerSession worker_session(worker_id);
924 ShenandoahHeapRegionSetIterator slice(_worker_slices[worker_id]);
925
|
228 // Coming out of Full GC, we would not have any forwarded objects.
229 // This also prevents resolves with fwdptr from kicking in while adjusting pointers in phase3.
230 heap->set_has_forwarded_objects(false);
231
232 heap->set_full_gc_move_in_progress(true);
233
234 // Setup workers for the rest
235 OrderAccess::fence();
236
237 // Initialize worker slices
238 ShenandoahHeapRegionSet** worker_slices = NEW_C_HEAP_ARRAY(ShenandoahHeapRegionSet*, heap->max_workers(), mtGC);
239 for (uint i = 0; i < heap->max_workers(); i++) {
240 worker_slices[i] = new ShenandoahHeapRegionSet();
241 }
242
243 {
244 // The rest of code performs region moves, where region status is undefined
245 // until all phases run together.
246 ShenandoahHeapLocker lock(heap->lock());
247
248 FullGCForwarding::begin();
249
250 phase2_calculate_target_addresses(worker_slices);
251
252 OrderAccess::fence();
253
254 phase3_update_references();
255
256 phase4_compact_objects(worker_slices);
257
258 phase5_epilog();
259
260 FullGCForwarding::end();
261 }
262
263 // Resize metaspace
264 MetaspaceGC::compute_new_size();
265
266 // Free worker slices
267 for (uint i = 0; i < heap->max_workers(); i++) {
268 delete worker_slices[i];
269 }
270 FREE_C_HEAP_ARRAY(ShenandoahHeapRegionSet*, worker_slices);
271
272 heap->set_full_gc_move_in_progress(false);
273 heap->set_full_gc_in_progress(false);
274
275 if (ShenandoahVerify) {
276 heap->verifier()->verify_after_fullgc();
277 }
278
279 if (VerifyAfterGC) {
280 Universe::verify();
353 }
354
355 void finish() {
356 assert(_to_region != nullptr, "should not happen");
357 _to_region->set_new_top(_compact_point);
358 }
359
360 bool is_compact_same_region() {
361 return _from_region == _to_region;
362 }
363
364 int empty_regions_pos() {
365 return _empty_regions_pos;
366 }
367
  // Computes the post-compaction address for one marked object and records
  // it in the forwarding data; the actual copy happens in a later phase.
  // With compact object headers an object may need extra space at its new
  // location (see copy_size), so two sizes are tracked: old_size if the
  // object stays put, new_size if it moves.
  void do_object(oop p) {
    assert(_from_region != nullptr, "must set before work");
    assert(_heap->complete_marking_context()->is_marked(p), "must be marked");
    assert(!_heap->complete_marking_context()->allocated_after_mark_start(p), "must be truly marked");

    size_t old_size = p->size();
    size_t new_size = p->copy_size(old_size, p->mark());
    // Moving iff the compaction point differs from the object's address.
    size_t obj_size = _compact_point == cast_from_oop<HeapWord*>(p) ? old_size : new_size;
    if (_compact_point + obj_size > _to_region->end()) {
      // Current to-region is full: seal it before switching.
      finish();

      // Object doesn't fit. Pick next empty region and start compacting there.
      ShenandoahHeapRegion* new_to_region;
      if (_empty_regions_pos < _empty_regions.length()) {
        new_to_region = _empty_regions.at(_empty_regions_pos);
        _empty_regions_pos++;
      } else {
        // Out of empty regions? Compact within the same (source) region.
        new_to_region = _from_region;
      }

      assert(new_to_region != _to_region, "must not reuse same to-region");
      assert(new_to_region != nullptr, "must not be null");
      _to_region = new_to_region;
      _compact_point = _to_region->bottom();
      // The compaction point changed, so whether the object moves (and thus
      // which size applies) must be re-evaluated.
      obj_size = _compact_point == cast_from_oop<HeapWord*>(p) ? old_size : new_size;
    }

    // Object fits into the current region. Record the new location, unless
    // the object stays at its current address (no forwarding needed then).
    assert(_compact_point + obj_size <= _to_region->end(), "must fit");
    shenandoah_assert_not_forwarded(nullptr, p);
    if (_compact_point != cast_from_oop<HeapWord*>(p)) {
      // Installing the forwarding pointer clobbers the mark word; save it
      // first if it carries information that must survive the move.
      _preserved_marks->push_if_necessary(p, p->mark());
      FullGCForwarding::forward_to(p, cast_to_oop(_compact_point));
    }
    _compact_point += obj_size;
  }
405 };
406
407 class ShenandoahPrepareForCompactionTask : public WorkerTask {
408 private:
409 PreservedMarksSet* const _preserved_marks;
410 ShenandoahHeap* const _heap;
411 ShenandoahHeapRegionSet** const _worker_slices;
412
413 public:
893 private:
894 ShenandoahHeap* const _heap;
895 uint const _worker_id;
896
897 public:
  // worker_id identifies the GC worker thread driving this closure.
  ShenandoahCompactObjectsClosure(uint worker_id) :
    _heap(ShenandoahHeap::heap()), _worker_id(worker_id) {}
900
  // Physically moves one object to the address computed during the
  // calculate-target-addresses phase. Objects without a forwarding entry
  // stay in place and need no work.
  void do_object(oop p) {
    assert(_heap->complete_marking_context()->is_marked(p), "must be marked");
    size_t size = p->size();
    if (FullGCForwarding::is_forwarded(p)) {
      HeapWord* compact_from = cast_from_oop<HeapWord*>(p);
      HeapWord* compact_to = cast_from_oop<HeapWord*>(FullGCForwarding::forwardee(p));
      assert(compact_from != compact_to, "Forwarded object should move");
      Copy::aligned_conjoint_words(compact_from, compact_to, size);
      oop new_obj = cast_to_oop(compact_to);

      // Fix up internal stack-chunk pointers (Loom) for the new address,
      // then reset the mark word, which still holds the forwarding pointer.
      ContinuationGCSupport::relativize_stack_chunk(new_obj);
      new_obj->init_mark();
      // With compact object headers, a moved object that had an installed
      // identity hash gets it re-materialized at the new location; reads
      // the old copy (p), so this must happen after the words were copied.
      new_obj->initialize_hash_if_necessary(p);
    }
  }
916 };
917
918 class ShenandoahCompactObjectsTask : public WorkerTask {
919 private:
920 ShenandoahHeap* const _heap;
921 ShenandoahHeapRegionSet** const _worker_slices;
922
923 public:
  // worker_slices holds the per-worker region sets computed earlier;
  // each worker compacts only the regions in its own slice.
  ShenandoahCompactObjectsTask(ShenandoahHeapRegionSet** worker_slices) :
    WorkerTask("Shenandoah Compact Objects"),
    _heap(ShenandoahHeap::heap()),
    _worker_slices(worker_slices) {
  }
929
930 void work(uint worker_id) {
931 ShenandoahParallelWorkerSession worker_session(worker_id);
932 ShenandoahHeapRegionSetIterator slice(_worker_slices[worker_id]);
933
|