 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/codeCache.hpp"
#include "compiler/oopMap.hpp"
#include "gc/g1/g1CollectedHeap.hpp"
#include "gc/g1/g1FullCollector.inline.hpp"
#include "gc/g1/g1FullGCAdjustTask.hpp"
#include "gc/g1/g1FullGCCompactTask.hpp"
#include "gc/g1/g1FullGCMarker.inline.hpp"
#include "gc/g1/g1FullGCMarkTask.hpp"
#include "gc/g1/g1FullGCPrepareTask.hpp"
#include "gc/g1/g1FullGCScope.hpp"
#include "gc/g1/g1OopClosures.hpp"
#include "gc/g1/g1Policy.hpp"
#include "gc/g1/g1RegionMarkStatsCache.inline.hpp"
#include "gc/shared/gcTraceTime.inline.hpp"
#include "gc/shared/preservedMarks.hpp"
#include "gc/shared/referenceProcessor.hpp"
#include "gc/shared/slidingForwarding.hpp"
#include "gc/shared/verifyOption.hpp"
#include "gc/shared/weakProcessor.inline.hpp"
#include "gc/shared/workerPolicy.hpp"
#include "logging/log.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/handles.inline.hpp"
#include "utilities/debug.hpp"

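// Compiled frames produced by C2 or JVMCI can hold derived pointers: interior
// pointers computed from a base oop. The DerivedPointerTable records them while it
// is active so they can be updated once their base objects have moved.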
static void clear_and_activate_derived_pointers() {
#if COMPILER2_OR_JVMCI
  DerivedPointerTable::clear();
#endif
}

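// Stop recording derived pointers once marking has completed; later phases only
// update the entries that were already collected.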
static void deactivate_derived_pointers() {
#if COMPILER2_OR_JVMCI
  DerivedPointerTable::set_active(false);
#endif
}

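  // Remainder of G1FullCollector::phase1_mark_live_objects(): after marking,
  // weak roots are cleaned, classes are unloaded and the object count is reported.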
  // Weak oops cleanup.
  {
    GCTraceTime(Debug, gc, phases) debug("Phase 1: Weak Processing", scope()->timer());
    WeakProcessor::weak_oops_do(_heap->workers(), &_is_alive, &do_nothing_cl, 1);
  }

  // Class unloading and cleanup.
  if (ClassUnloading) {
    GCTraceTime(Debug, gc, phases) debug("Phase 1: Class Unloading and Cleanup", scope()->timer());
    // Unload classes and purge the SystemDictionary.
    bool purged_class = SystemDictionary::do_unloading(scope()->timer());
    _heap->complete_cleaning(&_is_alive, purged_class);
  }

  scope()->tracer()->report_object_count_after_gc(&_is_alive);
}

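// Phase 2 plans the compaction: worker threads claim regions, decide which of
// them will be compacted, and record a forwarding (the new location) for every
// live object through the per-worker compaction points.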
void G1FullCollector::phase2_prepare_compaction() {
  GCTraceTime(Info, gc, phases) info("Phase 2: Prepare for compaction", scope()->timer());

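  // Reset the sliding-forwarding state before the prepare task records new
  // forwardings for this collection.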
  _heap->forwarding()->clear();

  G1FullGCPrepareTask task(this);
  run_task(&task);

  // TODO: Disabled for now because it violates the sliding-forwarding assumption.
  // Serial compaction avoids OOM when no regions were freed but memory is still left.
  // if (!task.has_freed_regions()) {
  //   task.prepare_serial_compaction();
  // }
}

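// Phase 3 walks the roots, the preserved marks and all live objects, rewriting
// every reference to the destination recorded during phase 2.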
void G1FullCollector::phase3_adjust_pointers() {
  // Adjust the pointers to reflect the new locations
  GCTraceTime(Info, gc, phases) info("Phase 3: Adjust pointers", scope()->timer());

  G1FullGCAdjustTask task(this);
  run_task(&task);
}

void G1FullCollector::phase4_do_compaction() {
  // Compact the heap using the compaction queues created in phase 2.
  GCTraceTime(Info, gc, phases) info("Phase 4: Compact heap", scope()->timer());
  G1FullGCCompactTask task(this);
  run_task(&task);

  // Serial compaction to avoid OOM when there are very few free regions.
  if (serial_compaction_point()->has_regions()) {
    task.serial_compaction();
  }
}