194 _heap->prepare_heap_for_full_collection();
195
196 PrepareRegionsClosure cl(this);
197 _heap->heap_region_iterate(&cl);
198
199 reference_processor()->start_discovery(scope()->should_clear_soft_refs());
200
201 // Clear and activate derived pointer collection.
202 clear_and_activate_derived_pointers();
203 }
204
// Runs the full-collection phases in their required order:
//   1) mark live objects, 2) prepare compaction, 3) adjust pointers,
//   4) compact heap, 5) reset metadata.
// Phases 3 and 4 are skipped when no regions were selected as
// compaction targets (see has_compaction_targets()).
205 void G1FullCollector::collect() {
// Ensure a code-cache marking cycle is active for this (STW) full GC;
// the flag marks this as not a concurrent-mark start.
206 G1CollectedHeap::start_codecache_marking_cycle_if_inactive(false /* concurrent_mark_start */);
207
208 phase1_mark_live_objects();
209 verify_after_marking();
210
211 // Don't add any more derived pointers during later phases
212 deactivate_derived_pointers();
213
214 phase2_prepare_compaction();
215
216 if (has_compaction_targets()) {
217 phase3_adjust_pointers();
218
219 phase4_do_compaction();
220 } else {
221 // All regions have a high live ratio thus will not be compacted.
222 // The live ratio is only considered if do_maximal_compaction is false.
223 log_info(gc, phases) ("No Regions selected for compaction. Skipping Phase 3: Adjust pointers and Phase 4: Compact heap");
224 }
225
// Runs unconditionally, whether or not compaction happened.
226 phase5_reset_metadata();
227 }
228
229 void G1FullCollector::complete_collection(size_t allocation_word_size) {
230 // Restore all marks.
231 restore_marks();
232
233 // When the pointers have been adjusted and moved, we can
234 // update the derived pointer table.
235 update_derived_pointers();
236
237 // Need completely cleared claim bits for the next concurrent marking or full gc.
238 ClassLoaderDataGraph::clear_claimed_marks();
239
240 // Prepare the bitmap for the next (potentially concurrent) marking.
241 _heap->concurrent_mark()->clear_bitmap(_heap->workers());
242
243 _heap->prepare_for_mutator_after_full_collection(allocation_word_size);
244
245 _heap->resize_all_tlabs();
|
194 _heap->prepare_heap_for_full_collection();
195
196 PrepareRegionsClosure cl(this);
197 _heap->heap_region_iterate(&cl);
198
199 reference_processor()->start_discovery(scope()->should_clear_soft_refs());
200
201 // Clear and activate derived pointer collection.
202 clear_and_activate_derived_pointers();
203 }
204
// Runs the full-collection phases in their required order:
//   1) mark live objects, 2) prepare compaction, 3) adjust pointers,
//   4) compact heap, 5) reset metadata.
// Phases 3 and 4 are skipped when no regions were selected as
// compaction targets (see has_compaction_targets()).
205 void G1FullCollector::collect() {
// Ensure a code-cache marking cycle is active for this (STW) full GC;
// the flag marks this as not a concurrent-mark start.
206 G1CollectedHeap::start_codecache_marking_cycle_if_inactive(false /* concurrent_mark_start */);
207
208 phase1_mark_live_objects();
209 verify_after_marking();
210
211 // Don't add any more derived pointers during later phases
212 deactivate_derived_pointers();
213
// FullGCForwarding::begin()/end() bracket the compaction-related phases
// (2-4). Presumably this sets up and tears down the forwarding-pointer
// encoding used while objects are being moved — NOTE(review): confirm
// against the FullGCForwarding declaration.
214 FullGCForwarding::begin();
215
216 phase2_prepare_compaction();
217
218 if (has_compaction_targets()) {
219 phase3_adjust_pointers();
220
221 phase4_do_compaction();
222 } else {
223 // All regions have a high live ratio thus will not be compacted.
224 // The live ratio is only considered if do_maximal_compaction is false.
225 log_info(gc, phases) ("No Regions selected for compaction. Skipping Phase 3: Adjust pointers and Phase 4: Compact heap");
226 }
227
// end() runs on both paths (compacted or skipped), matching begin() above.
228 FullGCForwarding::end();
229
230 phase5_reset_metadata();
231 }
232
233 void G1FullCollector::complete_collection(size_t allocation_word_size) {
234 // Restore all marks.
235 restore_marks();
236
237 // When the pointers have been adjusted and moved, we can
238 // update the derived pointer table.
239 update_derived_pointers();
240
241 // Need completely cleared claim bits for the next concurrent marking or full gc.
242 ClassLoaderDataGraph::clear_claimed_marks();
243
244 // Prepare the bitmap for the next (potentially concurrent) marking.
245 _heap->concurrent_mark()->clear_bitmap(_heap->workers());
246
247 _heap->prepare_for_mutator_after_full_collection(allocation_word_size);
248
249 _heap->resize_all_tlabs();
|