src/hotspot/share/gc/g1/g1FullCollector.cpp

 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "classfile/systemDictionary.hpp"
 27 #include "code/codeCache.hpp"
 28 #include "compiler/oopMap.hpp"
 29 #include "gc/g1/g1CollectedHeap.hpp"
 30 #include "gc/g1/g1FullCollector.inline.hpp"
 31 #include "gc/g1/g1FullGCAdjustTask.hpp"
 32 #include "gc/g1/g1FullGCCompactTask.hpp"
 33 #include "gc/g1/g1FullGCMarker.inline.hpp"
 34 #include "gc/g1/g1FullGCMarkTask.hpp"
 35 #include "gc/g1/g1FullGCPrepareTask.hpp"
 36 #include "gc/g1/g1FullGCScope.hpp"
 37 #include "gc/g1/g1OopClosures.hpp"
 38 #include "gc/g1/g1Policy.hpp"
 39 #include "gc/g1/g1RegionMarkStatsCache.inline.hpp"
 40 #include "gc/shared/gcTraceTime.inline.hpp"
 41 #include "gc/shared/preservedMarks.hpp"
 42 #include "gc/shared/referenceProcessor.hpp"

 43 #include "gc/shared/verifyOption.hpp"
 44 #include "gc/shared/weakProcessor.inline.hpp"
 45 #include "gc/shared/workerPolicy.hpp"
 46 #include "logging/log.hpp"
 47 #include "runtime/handles.inline.hpp"
 48 #include "utilities/debug.hpp"
 49 
 50 static void clear_and_activate_derived_pointers() {
 51 #if COMPILER2_OR_JVMCI
 52   DerivedPointerTable::clear();
 53 #endif
 54 }
 55 
 56 static void deactivate_derived_pointers() {
 57 #if COMPILER2_OR_JVMCI
 58   DerivedPointerTable::set_active(false);
 59 #endif
 60 }
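
Note: these helpers, together with update_derived_pointers() just below, bracket the moving collection for compiled code: the table of pointers derived from object bases (recorded from compiled frames when C2 or JVMCI is present) is cleared and activated before objects are marked and moved, and re-derived or deactivated afterwards; the call sites are outside the hunks shown here. A minimal standalone sketch of that bracketing pattern, using a made-up ToyDerivedPointerTable rather than HotSpot's real DerivedPointerTable, might look like:

// Hypothetical sketch (not HotSpot code): how a derived-pointer table is
// typically bracketed around a moving collection.  ToyDerivedPointerTable
// and full_collection() are illustration-only names.
#include <cstdio>

struct ToyDerivedPointerTable {
  bool active = false;
  void clear()            { active = true;  std::printf("table cleared, recording enabled\n"); }
  void set_active(bool a) { active = a; }
  void update_pointers()  { std::printf("derived pointers re-derived from moved bases\n"); active = false; }
};

static ToyDerivedPointerTable table;

static void full_collection() {
  table.clear();            // before marking: start from an empty, active table
  // ... mark, prepare, adjust, compact ...
  table.update_pointers();  // after objects have moved: fix derived pointers, deactivate
}

int main() { full_collection(); }
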
 61 
 62 static void update_derived_pointers() {

281 
282   // Weak oops cleanup.
283   {
284     GCTraceTime(Debug, gc, phases) debug("Phase 1: Weak Processing", scope()->timer());
285     WeakProcessor::weak_oops_do(_heap->workers(), &_is_alive, &do_nothing_cl, 1);
286   }
287 
288   // Class unloading and cleanup.
289   if (ClassUnloading) {
290     GCTraceTime(Debug, gc, phases) debug("Phase 1: Class Unloading and Cleanup", scope()->timer());
291     // Unload classes and purge the SystemDictionary.
292     bool purged_class = SystemDictionary::do_unloading(scope()->timer());
293     _heap->complete_cleaning(&_is_alive, purged_class);
294   }
295 
296   scope()->tracer()->report_object_count_after_gc(&_is_alive);
297 }
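
Note: both cleanup steps above reuse the liveness information produced by marking. _is_alive answers whether an object was reached, weak roots whose referent is dead are cleared (the keep-alive closure passed here, do_nothing_cl, does nothing), and class unloading purges metadata whose class loaders did not survive. A toy sketch of the weak-root part, with made-up types standing in for HotSpot's closures, could be:

// Hypothetical sketch: clearing weak roots against marking results.
// ToyObject, is_alive and process_weak_roots are illustration-only names.
#include <cstdio>
#include <vector>

struct ToyObject { bool marked = false; };

// Stand-in for the BoolObjectClosure behind _is_alive.
static bool is_alive(const ToyObject* obj) { return obj != nullptr && obj->marked; }

// Stand-in for the weak-oops walk: clear every weak root whose referent
// did not survive marking; live referents are left untouched (no-op keep-alive).
static void process_weak_roots(std::vector<ToyObject*>& weak_roots) {
  for (ToyObject*& ref : weak_roots) {
    if (!is_alive(ref)) {
      ref = nullptr;
    }
  }
}

int main() {
  ToyObject live_obj;  live_obj.marked = true;
  ToyObject dead_obj;                               // never marked
  std::vector<ToyObject*> weak_roots = { &live_obj, &dead_obj };
  process_weak_roots(weak_roots);
  std::printf("%d %d\n", weak_roots[0] != nullptr, weak_roots[1] != nullptr);  // prints "1 0"
}
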
298 
299 void G1FullCollector::phase2_prepare_compaction() {
300   GCTraceTime(Info, gc, phases) info("Phase 2: Prepare for compaction", scope()->timer());

301   G1FullGCPrepareTask task(this);
302   run_task(&task);
303 
304   // To avoid OOM when there is memory left.
305   if (!task.has_freed_regions()) {
306     task.prepare_serial_compaction();
307   }

308 }
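
Note: conceptually the prepare task plans a sliding compaction: live objects are visited in address order and each is assigned the next free destination address in the current compaction point; if that parallel plan frees no region at all (a nearly full heap), the serial fallback re-plans so the collection does not fail for lack of a free region. A self-contained toy version of the planning loop (hypothetical names, word-sized units) might be:

// Hypothetical sketch of compaction planning: assign each live object a
// forwarding address by sliding it towards the bottom of the heap.
#include <cstddef>
#include <cstdio>
#include <vector>

struct ToyObj { size_t addr; size_t size; bool live; size_t forwardee; };

static void plan_sliding_compaction(std::vector<ToyObj>& objs) {
  size_t compaction_top = 0;                 // next free destination address
  for (ToyObj& o : objs) {                   // objects visited in address order
    if (!o.live) continue;                   // dead objects leave a gap to fill
    o.forwardee = compaction_top;            // record where the object will move
    compaction_top += o.size;
  }
}

int main() {
  std::vector<ToyObj> objs = {{0, 2, true, 0}, {2, 3, false, 0}, {5, 1, true, 0}};
  plan_sliding_compaction(objs);
  for (const ToyObj& o : objs) {
    if (o.live) std::printf("obj@%zu -> %zu\n", o.addr, o.forwardee);
  }
}
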
309 
310 void G1FullCollector::phase3_adjust_pointers() {
311   // Adjust the pointers to reflect the new locations
312   GCTraceTime(Info, gc, phases) info("Phase 3: Adjust pointers", scope()->timer());
313 
314   G1FullGCAdjustTask task(this);
315   run_task(&task);
316 }
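
Note: phase 3 rewrites every reference (roots and fields of live objects) to point at the destination chosen in phase 2, before anything actually moves. A toy adjust pass over the same kind of planning data (again, hypothetical names) could be:

// Hypothetical sketch: retarget references to the forwarding addresses
// computed during the prepare phase, before objects are moved.
#include <cstddef>
#include <cstdio>
#include <unordered_map>
#include <vector>

// Maps an object's current address to its planned destination.
using ForwardingTable = std::unordered_map<size_t, size_t>;

static void adjust_references(std::vector<size_t>& reference_fields,
                              const ForwardingTable& forwarding) {
  for (size_t& ref : reference_fields) {
    auto it = forwarding.find(ref);
    if (it != forwarding.end()) {
      ref = it->second;          // retarget the field to the object's new address
    }
  }
}

int main() {
  ForwardingTable fwd = {{5, 2}};          // object at 5 will slide to 2
  std::vector<size_t> fields = {5, 0};     // two reference fields in the heap
  adjust_references(fields, fwd);
  std::printf("%zu %zu\n", fields[0], fields[1]);  // prints "2 0"
}
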
317 
318 void G1FullCollector::phase4_do_compaction() {
319   // Compact the heap using the compaction queues created in phase 2.
320   GCTraceTime(Info, gc, phases) info("Phase 4: Compact heap", scope()->timer());
321   G1FullGCCompactTask task(this);
322   run_task(&task);
323 
324   // Serial compact to avoid OOM when very few free regions.
325   if (serial_compaction_point()->has_regions()) {
326     task.serial_compaction();
327   }
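
Note: phase 4 then moves the objects: in ascending address order each live object is copied to the destination recorded in phase 2, which needs no extra buffering because this kind of sliding compaction never moves an object above a not-yet-copied one. A toy move loop over the planning data from the earlier sketch (hypothetical names) might be:

// Hypothetical sketch: perform the compaction by copying each live object
// down to its forwarding address.  Copying in ascending address order is
// safe because destinations are always at or below the source.
#include <cstddef>
#include <cstdio>
#include <cstring>
#include <vector>

struct ToyPlannedObj { size_t addr; size_t size; size_t forwardee; };

static void compact(std::vector<char>& heap, const std::vector<ToyPlannedObj>& live_objs) {
  for (const ToyPlannedObj& o : live_objs) {       // address order, as planned
    if (o.forwardee != o.addr) {
      std::memmove(&heap[o.forwardee], &heap[o.addr], o.size);
    }
  }
}

int main() {
  std::vector<char> heap = {'A', 'A', '.', '.', '.', 'B'};
  std::vector<ToyPlannedObj> live = {{0, 2, 0}, {5, 1, 2}};
  compact(heap, live);
  std::printf("%.*s\n", 3, heap.data());   // prints "AAB"
}
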

 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "classfile/systemDictionary.hpp"
 27 #include "code/codeCache.hpp"
 28 #include "compiler/oopMap.hpp"
 29 #include "gc/g1/g1CollectedHeap.hpp"
 30 #include "gc/g1/g1FullCollector.inline.hpp"
 31 #include "gc/g1/g1FullGCAdjustTask.hpp"
 32 #include "gc/g1/g1FullGCCompactTask.hpp"
 33 #include "gc/g1/g1FullGCMarker.inline.hpp"
 34 #include "gc/g1/g1FullGCMarkTask.hpp"
 35 #include "gc/g1/g1FullGCPrepareTask.hpp"
 36 #include "gc/g1/g1FullGCScope.hpp"
 37 #include "gc/g1/g1OopClosures.hpp"
 38 #include "gc/g1/g1Policy.hpp"
 39 #include "gc/g1/g1RegionMarkStatsCache.inline.hpp"
 40 #include "gc/shared/gcTraceTime.inline.hpp"
 41 #include "gc/shared/preservedMarks.hpp"
 42 #include "gc/shared/referenceProcessor.hpp"
 43 #include "gc/shared/slidingForwarding.hpp"
 44 #include "gc/shared/verifyOption.hpp"
 45 #include "gc/shared/weakProcessor.inline.hpp"
 46 #include "gc/shared/workerPolicy.hpp"
 47 #include "logging/log.hpp"
 48 #include "runtime/handles.inline.hpp"
 49 #include "utilities/debug.hpp"
 50 
 51 static void clear_and_activate_derived_pointers() {
 52 #if COMPILER2_OR_JVMCI
 53   DerivedPointerTable::clear();
 54 #endif
 55 }
 56 
 57 static void deactivate_derived_pointers() {
 58 #if COMPILER2_OR_JVMCI
 59   DerivedPointerTable::set_active(false);
 60 #endif
 61 }
 62 
 63 static void update_derived_pointers() {

282 
283   // Weak oops cleanup.
284   {
285     GCTraceTime(Debug, gc, phases) debug("Phase 1: Weak Processing", scope()->timer());
286     WeakProcessor::weak_oops_do(_heap->workers(), &_is_alive, &do_nothing_cl, 1);
287   }
288 
289   // Class unloading and cleanup.
290   if (ClassUnloading) {
291     GCTraceTime(Debug, gc, phases) debug("Phase 1: Class Unloading and Cleanup", scope()->timer());
292     // Unload classes and purge the SystemDictionary.
293     bool purged_class = SystemDictionary::do_unloading(scope()->timer());
294     _heap->complete_cleaning(&_is_alive, purged_class);
295   }
296 
297   scope()->tracer()->report_object_count_after_gc(&_is_alive);
298 }
299 
300 void G1FullCollector::phase2_prepare_compaction() {
301   GCTraceTime(Info, gc, phases) info("Phase 2: Prepare for compaction", scope()->timer());
302   _heap->forwarding()->clear();
303   G1FullGCPrepareTask task(this);
304   run_task(&task);
305 
306   // To avoid OOM when there is memory left.
307   // TODO: Disabled for now because it violates sliding-forwarding assumption.
308   // if (!task.has_freed_regions()) {
309   //   task.prepare_serial_compaction();
310   // }
311 }
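
Note: compared to the old version above, phase 2 now resets the heap's sliding-forwarding table before planning, and the serial-compaction fallback stays disabled per the TODO. The compact encoding appears to assume that all objects of one source region are forwarded into a small, fixed set of target regions, so a target can be expressed as a few bits plus an in-region offset, and the serial re-planning pass cannot guarantee that. A self-contained toy of such an encoding, with made-up names and a deliberately strict one-target-per-source-region limit, might look like:

// Hypothetical sketch of a sliding-forwarding encoding: instead of a full
// destination address, store only an in-region offset and keep a
// per-source-region table of allowed target regions.
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <utility>
#include <vector>

constexpr uint32_t NO_TARGET = UINT32_MAX;

struct ToySlidingForwarding {
  std::vector<uint32_t> target_of_region;   // one allowed target region per source region

  explicit ToySlidingForwarding(size_t num_regions)
    : target_of_region(num_regions, NO_TARGET) {}

  // What forwarding()->clear() does conceptually: forget all recorded targets.
  void clear() {
    std::fill(target_of_region.begin(), target_of_region.end(), NO_TARGET);
  }

  // Record that an object in src_region moves to (dst_region, dst_offset).
  // Only the offset is stored; the scheme relies on every object of
  // src_region being forwarded into the same dst_region.
  uint32_t forward(uint32_t src_region, uint32_t dst_region, uint32_t dst_offset) {
    if (target_of_region[src_region] == NO_TARGET) {
      target_of_region[src_region] = dst_region;
    }
    assert(target_of_region[src_region] == dst_region &&
           "would violate the sliding-forwarding assumption");
    return dst_offset;
  }

  // Decode a compact forwarding back into (region, offset).
  std::pair<uint32_t, uint32_t> forwardee(uint32_t src_region, uint32_t encoded_offset) const {
    return {target_of_region[src_region], encoded_offset};
  }
};

int main() {
  ToySlidingForwarding fwd(16);
  fwd.clear();
  uint32_t enc = fwd.forward(/*src_region*/ 3, /*dst_region*/ 1, /*dst_offset*/ 42);
  auto [region, offset] = fwd.forwardee(3, enc);
  assert(region == 1 && offset == 42);
  return 0;
}
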
312 
313 void G1FullCollector::phase3_adjust_pointers() {
314   // Adjust the pointers to reflect the new locations
315   GCTraceTime(Info, gc, phases) info("Phase 3: Adjust pointers", scope()->timer());
316 
317   G1FullGCAdjustTask task(this);
318   run_task(&task);
319 }
320 
321 void G1FullCollector::phase4_do_compaction() {
322   // Compact the heap using the compaction queues created in phase 2.
323   GCTraceTime(Info, gc, phases) info("Phase 4: Compact heap", scope()->timer());
324   G1FullGCCompactTask task(this);
325   run_task(&task);
326 
327   // Serial compact to avoid OOM when very few free regions.
328   if (serial_compaction_point()->has_regions()) {
329     task.serial_compaction();
330   }