195 _heap->prepare_heap_for_full_collection();
196
197 PrepareRegionsClosure cl(this);
198 _heap->heap_region_iterate(&cl);
199
200 reference_processor()->start_discovery(scope()->should_clear_soft_refs());
201
202 // Clear and activate derived pointer collection.
203 clear_and_activate_derived_pointers();
204 }
205
206 void G1FullCollector::collect() {
// Run the five phases of a stop-the-world G1 full collection.
// Phase order is a hard invariant: marking must complete before forwarding
// is prepared, pointers must be adjusted before objects move.
// Make sure a code-cache marking cycle is active for the duration of this
// full GC (false: this is not a concurrent-mark start).
207 G1CollectedHeap::start_codecache_marking_cycle_if_inactive(false /* concurrent_mark_start */);
208
// Phase 1: mark all live objects, then sanity-check the marking result.
209 phase1_mark_live_objects();
210 verify_after_marking();
211
212 // Don't add any more derived pointers during later phases
213 deactivate_derived_pointers();
214
// Phase 2: prepare compaction (presumably selects target regions and
// computes new object locations — see has_compaction_targets() below).
215 phase2_prepare_compaction();
216
// Phases 3 and 4 are only needed if phase 2 actually selected regions
// to compact; otherwise nothing will move and no pointers need adjusting.
217 if (has_compaction_targets()) {
218 phase3_adjust_pointers();
219
220 phase4_do_compaction();
221 } else {
222 // All regions have a high live ratio thus will not be compacted.
223 // The live ratio is only considered if do_maximal_compaction is false.
224 log_info(gc, phases) ("No Regions selected for compaction. Skipping Phase 3: Adjust pointers and Phase 4: Compact heap");
225 }
226
// Phase 5: reset per-collection metadata. Runs even when compaction was
// skipped.
227 phase5_reset_metadata();
228
// Close the code-cache marking cycle opened at the top of this method.
229 G1CollectedHeap::finish_codecache_marking_cycle();
230 }
231
232 void G1FullCollector::complete_collection() {
233 // Restore all marks.
234 restore_marks();
235
236 // When the pointers have been adjusted and moved, we can
237 // update the derived pointer table.
238 update_derived_pointers();
239
240 // Need completely cleared claim bits for the next concurrent marking or full gc.
241 ClassLoaderDataGraph::clear_claimed_marks();
242
243 // Prepare the bitmap for the next (potentially concurrent) marking.
244 _heap->concurrent_mark()->clear_bitmap(_heap->workers());
245
246 _heap->prepare_for_mutator_after_full_collection();
|
195 _heap->prepare_heap_for_full_collection();
196
197 PrepareRegionsClosure cl(this);
198 _heap->heap_region_iterate(&cl);
199
200 reference_processor()->start_discovery(scope()->should_clear_soft_refs());
201
202 // Clear and activate derived pointer collection.
203 clear_and_activate_derived_pointers();
204 }
205
206 void G1FullCollector::collect() {
// Run the five phases of a stop-the-world G1 full collection.
// Phase order is a hard invariant: marking must complete before forwarding
// is prepared, pointers must be adjusted before objects move.
// Make sure a code-cache marking cycle is active for the duration of this
// full GC (false: this is not a concurrent-mark start).
207 G1CollectedHeap::start_codecache_marking_cycle_if_inactive(false /* concurrent_mark_start */);
208
// Phase 1: mark all live objects, then sanity-check the marking result.
209 phase1_mark_live_objects();
210 verify_after_marking();
211
212 // Don't add any more derived pointers during later phases
213 deactivate_derived_pointers();
214
// NOTE(review): the FullGCForwarding begin/end pair brackets exactly the
// phases that create and consume forwarding information (prepare, adjust,
// compact) — presumably setting up forwarding-pointer encoding state;
// confirm against FullGCForwarding's declaration.
215 FullGCForwarding::begin();
216
// Phase 2: prepare compaction (presumably selects target regions and
// computes new object locations — see has_compaction_targets() below).
217 phase2_prepare_compaction();
218
// Phases 3 and 4 are only needed if phase 2 actually selected regions
// to compact; otherwise nothing will move and no pointers need adjusting.
219 if (has_compaction_targets()) {
220 phase3_adjust_pointers();
221
222 phase4_do_compaction();
223 } else {
224 // All regions have a high live ratio thus will not be compacted.
225 // The live ratio is only considered if do_maximal_compaction is false.
226 log_info(gc, phases) ("No Regions selected for compaction. Skipping Phase 3: Adjust pointers and Phase 4: Compact heap");
227 }
228
// Forwarding information is no longer needed once compaction is done.
229 FullGCForwarding::end();
230
// Phase 5: reset per-collection metadata. Runs even when compaction was
// skipped.
231 phase5_reset_metadata();
232
// Close the code-cache marking cycle opened at the top of this method.
233 G1CollectedHeap::finish_codecache_marking_cycle();
234 }
235
236 void G1FullCollector::complete_collection() {
237 // Restore all marks.
238 restore_marks();
239
240 // When the pointers have been adjusted and moved, we can
241 // update the derived pointer table.
242 update_derived_pointers();
243
244 // Need completely cleared claim bits for the next concurrent marking or full gc.
245 ClassLoaderDataGraph::clear_claimed_marks();
246
247 // Prepare the bitmap for the next (potentially concurrent) marking.
248 _heap->concurrent_mark()->clear_bitmap(_heap->workers());
249
250 _heap->prepare_for_mutator_after_full_collection();
|