
src/hotspot/share/gc/g1/g1FullCollector.cpp

--- old/src/hotspot/share/gc/g1/g1FullCollector.cpp

194   _heap->prepare_heap_for_full_collection();
195 
196   PrepareRegionsClosure cl(this);
197   _heap->heap_region_iterate(&cl);
198 
199   reference_processor()->start_discovery(scope()->should_clear_soft_refs());
200 
201   // Clear and activate derived pointer collection.
202   clear_and_activate_derived_pointers();
203 }
204 
205 void G1FullCollector::collect() {
206   G1CollectedHeap::start_codecache_marking_cycle_if_inactive(false /* concurrent_mark_start */);
207 
208   phase1_mark_live_objects();
209   verify_after_marking();
210 
211   // Don't add any more derived pointers during later phases
212   deactivate_derived_pointers();
213 
214   phase2_prepare_compaction();
215 
216   if (has_compaction_targets()) {
217     phase3_adjust_pointers();
218 
219     phase4_do_compaction();
220   } else {
221     // All regions have a high live ratio thus will not be compacted.
222     // The live ratio is only considered if do_maximal_compaction is false.
223     log_info(gc, phases) ("No Regions selected for compaction. Skipping Phase 3: Adjust pointers and Phase 4: Compact heap");
224   }
225 
226   phase5_reset_metadata();
227 
228   G1CollectedHeap::finish_codecache_marking_cycle();
229 }
230 
231 void G1FullCollector::complete_collection() {
232   // Restore all marks.
233   restore_marks();
234 
235   // When the pointers have been adjusted and moved, we can
236   // update the derived pointer table.
237   update_derived_pointers();
238 
239   // Need completely cleared claim bits for the next concurrent marking or full gc.
240   ClassLoaderDataGraph::clear_claimed_marks();
241 
242   // Prepare the bitmap for the next (potentially concurrent) marking.
243   _heap->concurrent_mark()->clear_bitmap(_heap->workers());
244 
245   _heap->prepare_for_mutator_after_full_collection();

+++ new/src/hotspot/share/gc/g1/g1FullCollector.cpp

194   _heap->prepare_heap_for_full_collection();
195 
196   PrepareRegionsClosure cl(this);
197   _heap->heap_region_iterate(&cl);
198 
199   reference_processor()->start_discovery(scope()->should_clear_soft_refs());
200 
201   // Clear and activate derived pointer collection.
202   clear_and_activate_derived_pointers();
203 }
204 
205 void G1FullCollector::collect() {
206   G1CollectedHeap::start_codecache_marking_cycle_if_inactive(false /* concurrent_mark_start */);
207 
208   phase1_mark_live_objects();
209   verify_after_marking();
210 
211   // Don't add any more derived pointers during later phases
212   deactivate_derived_pointers();
213 
214   FullGCForwarding::begin();
215 
216   phase2_prepare_compaction();
217 
218   if (has_compaction_targets()) {
219     phase3_adjust_pointers();
220 
221     phase4_do_compaction();
222   } else {
223     // All regions have a high live ratio thus will not be compacted.
224     // The live ratio is only considered if do_maximal_compaction is false.
225     log_info(gc, phases) ("No Regions selected for compaction. Skipping Phase 3: Adjust pointers and Phase 4: Compact heap");
226   }
227 
228   FullGCForwarding::end();
229 
230   phase5_reset_metadata();
231 
232   G1CollectedHeap::finish_codecache_marking_cycle();
233 }
234 
235 void G1FullCollector::complete_collection() {
236   // Restore all marks.
237   restore_marks();
238 
239   // When the pointers have been adjusted and moved, we can
240   // update the derived pointer table.
241   update_derived_pointers();
242 
243   // Need completely cleared claim bits for the next concurrent marking or full gc.
244   ClassLoaderDataGraph::clear_claimed_marks();
245 
246   // Prepare the bitmap for the next (potentially concurrent) marking.
247   _heap->concurrent_mark()->clear_bitmap(_heap->workers());
248 
249   _heap->prepare_for_mutator_after_full_collection();