< prev index next >

src/hotspot/share/gc/g1/g1FullCollector.cpp

Print this page

204   _heap->prepare_heap_for_full_collection();
205 
206   PrepareRegionsClosure cl(this);
207   _heap->heap_region_iterate(&cl);
208 
209   reference_processor()->start_discovery(scope()->should_clear_soft_refs());
210 
211   // Clear and activate derived pointer collection.
212   clear_and_activate_derived_pointers();
213 }
214 
215 void G1FullCollector::collect() {  // Runs the five full-GC phases in strict order.
216   G1CollectedHeap::start_codecache_marking_cycle_if_inactive(false /* concurrent_mark_start */);
217 
218   phase1_mark_live_objects();  // Phase 1: mark all live objects.
219   verify_after_marking();
220 
221   // Don't add any more derived pointers during later phases
222   deactivate_derived_pointers();
223 


224   phase2_prepare_compaction();  // Phase 2: select compaction targets.
225 
226   if (has_compaction_targets()) {  // Phases 3-4 only run when at least one region will actually move.
227     phase3_adjust_pointers();
228 
229     phase4_do_compaction();
230   } else {
231     // All regions have a high live ratio thus will not be compacted.
232     // The live ratio is only considered if do_maximal_compaction is false.
233     log_info(gc, phases) ("No Regions selected for compaction. Skipping Phase 3: Adjust pointers and Phase 4: Compact heap");
234   }
235 


236   phase5_reset_metadata();  // Phase 5: always runs, compaction or not.
237 }
238 
239 void G1FullCollector::complete_collection(size_t allocation_word_size) {
240   // Restore all marks.
241   restore_marks();
242 
243   // When the pointers have been adjusted and moved, we can
244   // update the derived pointer table.
245   update_derived_pointers();
246 
247   // Need completely cleared claim bits for the next concurrent marking or full gc.
248   ClassLoaderDataGraph::clear_claimed_marks();
249 
250   // Prepare the bitmap for the next (potentially concurrent) marking.
251   _heap->concurrent_mark()->clear_bitmap(_heap->workers());
252 
253   _heap->prepare_for_mutator_after_full_collection(allocation_word_size);
254 
255   _heap->resize_all_tlabs();

204   _heap->prepare_heap_for_full_collection();
205 
206   PrepareRegionsClosure cl(this);
207   _heap->heap_region_iterate(&cl);
208 
209   reference_processor()->start_discovery(scope()->should_clear_soft_refs());
210 
211   // Clear and activate derived pointer collection.
212   clear_and_activate_derived_pointers();
213 }
214 
215 void G1FullCollector::collect() {  // Runs the five full-GC phases in strict order.
216   G1CollectedHeap::start_codecache_marking_cycle_if_inactive(false /* concurrent_mark_start */);
217 
218   phase1_mark_live_objects();  // Phase 1: mark all live objects.
219   verify_after_marking();
220 
221   // Don't add any more derived pointers during later phases
222   deactivate_derived_pointers();
223 
224   FullGCForwarding::begin();  // NOTE(review): forwarding scope brackets phases 2-4; presumably sets up state needed to record forwarding during compaction — confirm against FullGCForwarding.
225 
226   phase2_prepare_compaction();  // Phase 2: select compaction targets.
227 
228   if (has_compaction_targets()) {  // Phases 3-4 only run when at least one region will actually move.
229     phase3_adjust_pointers();
230 
231     phase4_do_compaction();
232   } else {
233     // All regions have a high live ratio thus will not be compacted.
234     // The live ratio is only considered if do_maximal_compaction is false.
235     log_info(gc, phases) ("No Regions selected for compaction. Skipping Phase 3: Adjust pointers and Phase 4: Compact heap");
236   }
237 
238   FullGCForwarding::end();  // Closes the forwarding scope opened above, even when phases 3-4 were skipped.
239 
240   phase5_reset_metadata();  // Phase 5: always runs, compaction or not.
241 }
242 
243 void G1FullCollector::complete_collection(size_t allocation_word_size) {
244   // Restore all marks.
245   restore_marks();
246 
247   // When the pointers have been adjusted and moved, we can
248   // update the derived pointer table.
249   update_derived_pointers();
250 
251   // Need completely cleared claim bits for the next concurrent marking or full gc.
252   ClassLoaderDataGraph::clear_claimed_marks();
253 
254   // Prepare the bitmap for the next (potentially concurrent) marking.
255   _heap->concurrent_mark()->clear_bitmap(_heap->workers());
256 
257   _heap->prepare_for_mutator_after_full_collection(allocation_word_size);
258 
259   _heap->resize_all_tlabs();
< prev index next >