
src/hotspot/share/gc/parallel/psParallelCompact.cpp

--- old/src/hotspot/share/gc/parallel/psParallelCompact.cpp

 968   if (split_info.is_valid()) {
 969     split_info.clear();
 970   }
 971   DEBUG_ONLY(split_info.verify_clear();)
 972 }
 973 
 974 void PSParallelCompact::pre_compact()
 975 {
 976   // Update the from & to space pointers in space_info, since they are swapped
 977   // at each young gen gc.  Do the update unconditionally (even though a
 978   // promotion failure does not swap spaces) because an unknown number of young
 979   // collections will have swapped the spaces an unknown number of times.
 980   GCTraceTime(Debug, gc, phases) tm("Pre Compact", &_gc_timer);
 981   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 982   _space_info[from_space_id].set_space(heap->young_gen()->from_space());
 983   _space_info[to_space_id].set_space(heap->young_gen()->to_space());
 984 
 985   // Increment the invocation count
 986   heap->increment_total_collections(true);
 987 


 988   // We need to track unique mark sweep invocations as well.
 989   _total_invocations++;
 990 
 991   heap->print_heap_before_gc();
 992   heap->trace_heap_before_gc(&_gc_tracer);
 993 
 994   // Fill in TLABs
 995   heap->ensure_parsability(true);  // retire TLABs
 996 
 997   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
 998     Universe::verify("Before GC");
 999   }
1000 
1001   // Verify object start arrays
1002   if (VerifyObjectStartArray &&
1003       VerifyBeforeGC) {
1004     heap->old_gen()->verify_object_start_array();
1005   }
1006 
1007   DEBUG_ONLY(mark_bitmap()->verify_clear();)
1008   DEBUG_ONLY(summary_data().verify_clear();)
1009 
1010   ParCompactionManager::reset_all_bitmap_query_caches();
1011 }
1012 
1013 void PSParallelCompact::post_compact()
1014 {
1015   GCTraceTime(Info, gc, phases) tm("Post Compact", &_gc_timer);
1016   ParCompactionManager::remove_all_shadow_regions();
1017 


1018   for (unsigned int id = old_space_id; id < last_space_id; ++id) {
1019     // Clear the marking bitmap, summary data and split info.
1020     clear_data_covering_space(SpaceId(id));
1021     // Update top().  Must be done after clearing the bitmap and summary data.
1022     _space_info[id].publish_new_top();
1023   }
1024 
1025   ParCompactionManager::flush_all_string_dedup_requests();
1026 
1027   MutableSpace* const eden_space = _space_info[eden_space_id].space();
1028   MutableSpace* const from_space = _space_info[from_space_id].space();
1029   MutableSpace* const to_space   = _space_info[to_space_id].space();
1030 
1031   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
1032   bool eden_empty = eden_space->is_empty();
1033 
1034   // Update heap occupancy information which is used as input to the soft ref
1035   // clearing policy at the next gc.
1036   Universe::heap()->update_capacity_and_used_at_gc();
1037 

1940   _gc_tracer.report_dense_prefix(dense_prefix(old_space_id));
1941   _gc_tracer.report_gc_end(_gc_timer.gc_end(), _gc_timer.time_partitions());
1942 
1943   return true;
1944 }
1945 
1946 class PCAddThreadRootsMarkingTaskClosure : public ThreadClosure {
1947 private:
1948   uint _worker_id;
1949 
1950 public:
1951   PCAddThreadRootsMarkingTaskClosure(uint worker_id) : _worker_id(worker_id) { }
1952   void do_thread(Thread* thread) {
1953     assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
1954 
1955     ResourceMark rm;
1956 
1957     ParCompactionManager* cm = ParCompactionManager::gc_thread_compaction_manager(_worker_id);
1958 
1959     PCMarkAndPushClosure mark_and_push_closure(cm);
1960     MarkingCodeBlobClosure mark_and_push_in_blobs(&mark_and_push_closure, !CodeBlobToOopClosure::FixRelocations);
1961 
1962     thread->oops_do(&mark_and_push_closure, &mark_and_push_in_blobs);
1963 
 1964     // Do the real work: trace all objects now reachable from these thread roots
1965     cm->follow_marking_stacks();
1966   }
1967 };
1968 
1969 static void mark_from_roots_work(ParallelRootType::Value root_type, uint worker_id) {
1970   assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
1971 
1972   ParCompactionManager* cm =
1973     ParCompactionManager::gc_thread_compaction_manager(worker_id);
1974   PCMarkAndPushClosure mark_and_push_closure(cm);
1975 
1976   switch (root_type) {
1977     case ParallelRootType::class_loader_data:
1978       {
1979         CLDToOopClosure cld_closure(&mark_and_push_closure, ClassLoaderData::_claim_strong);
1980         ClassLoaderDataGraph::always_strong_cld_do(&cld_closure);

+++ new/src/hotspot/share/gc/parallel/psParallelCompact.cpp

 968   if (split_info.is_valid()) {
 969     split_info.clear();
 970   }
 971   DEBUG_ONLY(split_info.verify_clear();)
 972 }
 973 
 974 void PSParallelCompact::pre_compact()
 975 {
 976   // Update the from & to space pointers in space_info, since they are swapped
 977   // at each young gen gc.  Do the update unconditionally (even though a
 978   // promotion failure does not swap spaces) because an unknown number of young
 979   // collections will have swapped the spaces an unknown number of times.
 980   GCTraceTime(Debug, gc, phases) tm("Pre Compact", &_gc_timer);
 981   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
 982   _space_info[from_space_id].set_space(heap->young_gen()->from_space());
 983   _space_info[to_space_id].set_space(heap->young_gen()->to_space());
 984 
 985   // Increment the invocation count
 986   heap->increment_total_collections(true);
 987 
 988   CodeCache::increment_marking_cycle();
 989 
 990   // We need to track unique mark sweep invocations as well.
 991   _total_invocations++;
 992 
 993   heap->print_heap_before_gc();
 994   heap->trace_heap_before_gc(&_gc_tracer);
 995 
 996   // Fill in TLABs
 997   heap->ensure_parsability(true);  // retire TLABs
 998 
 999   if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) {
1000     Universe::verify("Before GC");
1001   }
1002 
1003   // Verify object start arrays
1004   if (VerifyObjectStartArray &&
1005       VerifyBeforeGC) {
1006     heap->old_gen()->verify_object_start_array();
1007   }
1008 
1009   DEBUG_ONLY(mark_bitmap()->verify_clear();)
1010   DEBUG_ONLY(summary_data().verify_clear();)
1011 
1012   ParCompactionManager::reset_all_bitmap_query_caches();
1013 }
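
The unconditional set_space() calls at the top of pre_compact() matter because any number of young collections may have run since the last full GC, and each successful scavenge swaps the roles of from-space and to-space (a promotion failure does not). Since the parity of swaps is unknown, cached space pointers must be refreshed every time. A minimal standalone sketch of the hazard, using hypothetical stand-in types rather than JDK code:

    #include <cstdio>
    #include <utility>

    // Hypothetical stand-ins for the survivor spaces; not JDK types.
    struct Space { const char* name; };

    struct YoungGen {
      Space* from;
      Space* to;
      // A successful scavenge evacuates survivors into to-space and then
      // swaps the two pointers; a promotion failure leaves them as-is.
      void scavenge(bool promotion_failed) {
        if (!promotion_failed) std::swap(from, to);
      }
    };

    int main() {
      Space a{"A"}, b{"B"};
      YoungGen young{&a, &b};
      Space* cached_from = young.from;  // like a stale _space_info entry
      young.scavenge(false);            // swap
      young.scavenge(true);             // promotion failure: no swap
      // An odd number of swaps has happened, so the cached pointer is stale;
      // a full GC cannot know the parity and must re-read unconditionally.
      std::printf("cached=%s, actual from=%s\n", cached_from->name, young.from->name);
      return 0;
    }
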
1014 
1015 void PSParallelCompact::post_compact()
1016 {
1017   GCTraceTime(Info, gc, phases) tm("Post Compact", &_gc_timer);
1018   ParCompactionManager::remove_all_shadow_regions();
1019 
1020   CodeCache::increment_marking_cycle();
1021 
1022   for (unsigned int id = old_space_id; id < last_space_id; ++id) {
1023     // Clear the marking bitmap, summary data and split info.
1024     clear_data_covering_space(SpaceId(id));
1025     // Update top().  Must be done after clearing the bitmap and summary data.
1026     _space_info[id].publish_new_top();
1027   }
1028 
1029   ParCompactionManager::flush_all_string_dedup_requests();
1030 
1031   MutableSpace* const eden_space = _space_info[eden_space_id].space();
1032   MutableSpace* const from_space = _space_info[from_space_id].space();
1033   MutableSpace* const to_space   = _space_info[to_space_id].space();
1034 
1035   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
1036   bool eden_empty = eden_space->is_empty();
1037 
1038   // Update heap occupancy information which is used as input to the soft ref
1039   // clearing policy at the next gc.
1040   Universe::heap()->update_capacity_and_used_at_gc();
1041 
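
The new CodeCache::increment_marking_cycle() calls bracket the collection: one in pre_compact() before marking begins and one near the top of post_compact() once marking and compaction are done. This suggests an epoch counter whose parity tells code-cache consumers whether a marking cycle is in progress, and whose value lets them detect stale state. The following standalone sketch of such an epoch scheme illustrates the pattern only; it is not the JDK implementation:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical global epoch; the real counter lives in CodeCache.
    static uint64_t g_marking_cycle = 0;

    static void increment_marking_cycle() { ++g_marking_cycle; }
    static bool marking_in_progress()     { return (g_marking_cycle & 1) != 0; }

    // A consumer records the cycle in which it was last visited and can
    // later detect, in O(1), whether it was visited in the current epoch.
    struct CompiledMethodStub {
      uint64_t last_marked_cycle = 0;
      void on_marked() { last_marked_cycle = g_marking_cycle; }
      bool marked_this_cycle() const { return last_marked_cycle == g_marking_cycle; }
    };

    int main() {
      CompiledMethodStub m;
      increment_marking_cycle();   // like pre_compact(): parity odd, cycle active
      m.on_marked();               // stub is visited during marking
      increment_marking_cycle();   // like post_compact(): parity even, cycle done
      std::printf("active=%d, visited_in_current_epoch=%d\n",
                  marking_in_progress(), m.marked_this_cycle());
      return 0;
    }
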

1944   _gc_tracer.report_dense_prefix(dense_prefix(old_space_id));
1945   _gc_tracer.report_gc_end(_gc_timer.gc_end(), _gc_timer.time_partitions());
1946 
1947   return true;
1948 }
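
The report_gc_end() call above closes out the timing data that the GCTraceTime instances in this file ("Pre Compact", "Post Compact") accumulate. GCTraceTime is scoped: the phase is timed from construction to the end of the enclosing block and logged under the given tags. A generic RAII sketch of that shape, with a hypothetical class rather than the real macro:

    #include <chrono>
    #include <cstdio>

    // Hypothetical scoped phase timer mimicking GCTraceTime's RAII shape.
    class ScopedPhaseTimer {
      const char* _name;
      std::chrono::steady_clock::time_point _start;
    public:
      explicit ScopedPhaseTimer(const char* name)
          : _name(name), _start(std::chrono::steady_clock::now()) {}
      ~ScopedPhaseTimer() {
        auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(
            std::chrono::steady_clock::now() - _start).count();
        std::printf("[gc,phases] %s %lldms\n", _name, (long long)ms);
      }
    };

    void post_compact_like() {
      ScopedPhaseTimer tm("Post Compact");  // logged when the scope exits
      // ... phase work runs here ...
    }

    int main() { post_compact_like(); return 0; }
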
1949 
1950 class PCAddThreadRootsMarkingTaskClosure : public ThreadClosure {
1951 private:
1952   uint _worker_id;
1953 
1954 public:
1955   PCAddThreadRootsMarkingTaskClosure(uint worker_id) : _worker_id(worker_id) { }
1956   void do_thread(Thread* thread) {
1957     assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
1958 
1959     ResourceMark rm;
1960 
1961     ParCompactionManager* cm = ParCompactionManager::gc_thread_compaction_manager(_worker_id);
1962 
1963     PCMarkAndPushClosure mark_and_push_closure(cm);
1964     MarkingCodeBlobClosure mark_and_push_in_blobs(&mark_and_push_closure, !CodeBlobToOopClosure::FixRelocations, true /* keepalive nmethods */);
1965 
1966     thread->oops_do(&mark_and_push_closure, &mark_and_push_in_blobs);
1967 
 1968     // Do the real work: trace all objects now reachable from these thread roots
1969     cm->follow_marking_stacks();
1970   }
1971 };
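
Relative to the old version, the MarkingCodeBlobClosure above gains a third constructor argument (true /* keepalive nmethods */): compiled methods found on thread stacks are now kept alive for this marking cycle rather than remaining candidates for unloading. The closure is driven once per worker over a subset of threads; a hedged usage sketch, following the pattern HotSpot uses elsewhere (the actual driver code is outside this excerpt):

    // Each GC worker walks a disjoint subset of Java threads and marks the
    // oops (and, with the new flag, the nmethods) found on their stacks.
    static void mark_thread_roots(uint worker_id) {
      PCAddThreadRootsMarkingTaskClosure closure(worker_id);
      Threads::possibly_parallel_threads_do(true /* is_par */, &closure);
    }
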
1972 
1973 static void mark_from_roots_work(ParallelRootType::Value root_type, uint worker_id) {
1974   assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
1975 
1976   ParCompactionManager* cm =
1977     ParCompactionManager::gc_thread_compaction_manager(worker_id);
1978   PCMarkAndPushClosure mark_and_push_closure(cm);
1979 
1980   switch (root_type) {
1981     case ParallelRootType::class_loader_data:
1982       {
1983         CLDToOopClosure cld_closure(&mark_and_push_closure, ClassLoaderData::_claim_strong);
1984         ClassLoaderDataGraph::always_strong_cld_do(&cld_closure);
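
The ClassLoaderData::_claim_strong token passed above makes the CLD walk idempotent within one marking pass: each ClassLoaderData is atomically claimed before its oops are pushed, so parallel workers reaching the same CLD process it exactly once. A standalone sketch of that claim-once pattern with a hypothetical node type:

    #include <atomic>
    #include <cstdio>

    // Hypothetical claimable node; the real claim state lives in
    // ClassLoaderData and distinguishes strong, weak, and other claims.
    struct ClaimableNode {
      std::atomic<int> claimed{0};
      bool try_claim() {
        int expected = 0;
        return claimed.compare_exchange_strong(expected, 1);
      }
    };

    static void visit(ClaimableNode& n, int worker) {
      if (!n.try_claim()) return;  // another worker already took it
      std::printf("worker %d processes the node\n", worker);
    }

    int main() {
      ClaimableNode n;
      visit(n, 0);  // claims and processes
      visit(n, 1);  // no-op: already claimed
      return 0;
    }
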