< prev index next >

src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp

Print this page




1050     _rp(rp) {}
1051 
1052   void work(uint worker_id) {
1053     ShenandoahParallelWorkerSession worker_session(worker_id);
1054     ShenandoahEvacOOMScope oom_evac_scope;
1055     ShenandoahEvacuateUpdateRootsClosure cl;
1056     MarkingCodeBlobClosure blobsCl(&cl, CodeBlobToOopClosure::FixRelocations);
1057     _rp->roots_do(worker_id, &cl);
1058   }
1059 };
1060 
// Evacuate and update all roots at a safepoint, using all active workers.
void ShenandoahHeap::evacuate_and_update_roots() {
#if COMPILER2_OR_JVMCI
  // Derived pointers become temporarily invalid while base oops move;
  // collect them here and fix them up after root evacuation below.
  DerivedPointerTable::clear();
#endif
  assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Only iterate roots while world is stopped");
  {
    // Include concurrent roots if current cycle can not process those roots concurrently
    ShenandoahRootEvacuator rp(workers()->active_workers(),
                               ShenandoahPhaseTimings::init_evac,
                               !ShenandoahConcurrentRoots::should_do_concurrent_roots());
    ShenandoahEvacuateUpdateRootsTask roots_task(&rp);
    workers()->run_task(&roots_task);
  }

#if COMPILER2_OR_JVMCI
  DerivedPointerTable::update_pointers();
#endif
}
1079 
1080 // Returns size in bytes
1081 size_t ShenandoahHeap::unsafe_max_tlab_alloc(Thread *thread) const {
1082   if (ShenandoahElasticTLAB) {
1083     // With Elastic TLABs, return the max allowed size, and let the allocation path
1084     // figure out the safe size for current allocation.
1085     return ShenandoahHeapRegion::max_tlab_size_bytes();
1086   } else {
1087     return MIN2(_free_set->unsafe_peek_free(), ShenandoahHeapRegion::max_tlab_size_bytes());
1088   }
1089 }
1090 


1488         heuristics()->choose_collection_set(_collection_set);
1489 
1490         _free_set->rebuild();
1491       }
1492     }
1493 
1494     // If collection set has candidates, start evacuation.
1495     // Otherwise, bypass the rest of the cycle.
1496     if (!collection_set()->is_empty()) {
1497       ShenandoahGCPhase init_evac(ShenandoahPhaseTimings::init_evac);
1498 
1499       if (ShenandoahVerify) {
1500         verifier()->verify_before_evacuation();
1501       }
1502 
1503       set_evacuation_in_progress(true);
1504       // From here on, we need to update references.
1505       set_has_forwarded_objects(true);
1506 
1507       if (!is_degenerated_gc_in_progress()) {

1508         evacuate_and_update_roots();
1509       }
1510 
1511       if (ShenandoahPacing) {
1512         pacer()->setup_for_evac();
1513       }
1514 
1515       if (ShenandoahVerify) {

1516         if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
1517           ShenandoahRootVerifier::RootTypes types = ShenandoahRootVerifier::combine(ShenandoahRootVerifier::JNIHandleRoots, ShenandoahRootVerifier::WeakRoots);
1518           types = ShenandoahRootVerifier::combine(types, ShenandoahRootVerifier::CLDGRoots);
1519           verifier()->verify_roots_no_forwarded_except(types);
1520         } else {
1521           verifier()->verify_roots_no_forwarded();
1522         }





1523         verifier()->verify_during_evacuation();
1524       }
1525     } else {
1526       if (ShenandoahVerify) {
1527         verifier()->verify_after_concmark();
1528       }
1529 
1530       if (VerifyAfterGC) {
1531         Universe::verify();
1532       }
1533     }
1534 
1535   } else {
1536     concurrent_mark()->cancel();
1537     stop_concurrent_marking();
1538 
1539     if (process_references()) {
1540       // Abandon reference processing right away: pre-cleaning must have failed.
1541       ReferenceProcessor *rp = ref_processor();
1542       rp->disable_discovery();


1593 
  void work(uint worker_id) {
    // All evacuations below must honor the OOM-during-evacuation protocol.
    ShenandoahEvacOOMScope oom;
    {
      // jni_roots and weak_roots are OopStorage backed roots, concurrent iteration
      // may race against OopStorage::release() calls.
      ShenandoahEvacUpdateOopStorageRootsClosure cl;
      _vm_roots.oops_do<ShenandoahEvacUpdateOopStorageRootsClosure>(&cl);
      _weak_roots.oops_do<ShenandoahEvacUpdateOopStorageRootsClosure>(&cl);
    }

    {
      // Walk class loader data roots with a strong CLD claim.
      ShenandoahEvacuateUpdateRootsClosure cl;
      CLDToOopClosure clds(&cl, ClassLoaderData::_claim_strong);
      _cld_roots.cld_do(&clds);
    }
  }
1610 };
1611 
1612 void ShenandoahHeap::op_roots() {
1613   if (is_evacuation_in_progress() &&
1614       ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
1615     ShenandoahConcurrentRootsEvacUpdateTask task;
1616     workers()->run_task(&task);





1617   }
1618 }
1619 
// Concurrent phase: reset marking bitmaps in preparation for the next cycle.
void ShenandoahHeap::op_reset() {
  reset_mark_bitmap();
}
1623 
// Concurrent phase: preclean discovered weak references.
void ShenandoahHeap::op_preclean() {
  concurrent_mark()->preclean_weak_refs();
}
1627 
// Safepoint phase: start a traversal GC collection.
void ShenandoahHeap::op_init_traversal() {
  traversal_gc()->init_traversal_collection();
}
1631 
// Concurrent phase: run the traversal GC collection.
void ShenandoahHeap::op_traversal() {
  traversal_gc()->concurrent_traversal_collection();
}
1635 
1636 void ShenandoahHeap::op_final_traversal() {


1973     ShenandoahGCPhase phase(full_gc ?
1974                             ShenandoahPhaseTimings::full_gc_purge_par :
1975                             ShenandoahPhaseTimings::purge_par);
1976     ShenandoahIsAliveSelector is_alive;
1977     uint num_workers = _workers->active_workers();
1978     ShenandoahClassUnloadingTask unlink_task(is_alive.is_alive_closure(), num_workers, purged_class);
1979     _workers->run_task(&unlink_task);
1980   }
1981 
1982   {
1983     ShenandoahGCPhase phase(full_gc ?
1984                             ShenandoahPhaseTimings::full_gc_purge_cldg :
1985                             ShenandoahPhaseTimings::purge_cldg);
1986     ClassLoaderDataGraph::purge();
1987   }
1988   // Resize and verify metaspace
1989   MetaspaceGC::compute_new_size();
1990   MetaspaceUtils::verify_metrics();
1991 }
1992 
1993 // Process leftover weak oops: update them, if needed or assert they do not
1994 // need updating otherwise.
1995 // Weak processor API requires us to visit the oops, even if we are not doing
1996 // anything to them.
1997 void ShenandoahHeap::stw_process_weak_roots(bool full_gc) {
1998   ShenandoahGCPhase root_phase(full_gc ?
1999                                ShenandoahPhaseTimings::full_gc_purge :
2000                                ShenandoahPhaseTimings::purge);
2001   uint num_workers = _workers->active_workers();
2002   ShenandoahPhaseTimings::Phase timing_phase = full_gc ?
2003                                                ShenandoahPhaseTimings::full_gc_purge_par :
2004                                                ShenandoahPhaseTimings::purge_par;
2005   // Cleanup weak roots
2006   ShenandoahGCPhase phase(timing_phase);
2007   if (has_forwarded_objects()) {
2008     ShenandoahForwardedIsAliveClosure is_alive;
2009     ShenandoahUpdateRefsClosure keep_alive;
2010     ShenandoahParallelWeakRootsCleaningTask<ShenandoahForwardedIsAliveClosure, ShenandoahUpdateRefsClosure>
2011       cleaning_task(&is_alive, &keep_alive, num_workers);
2012     _workers->run_task(&cleaning_task);
2013   } else {
2014     ShenandoahIsAliveClosure is_alive;
2015 #ifdef ASSERT
2016   ShenandoahAssertNotForwardedClosure verify_cl;
2017   ShenandoahParallelWeakRootsCleaningTask<ShenandoahIsAliveClosure, ShenandoahAssertNotForwardedClosure>
2018     cleaning_task(&is_alive, &verify_cl, num_workers);
2019 #else
2020   ShenandoahParallelWeakRootsCleaningTask<ShenandoahIsAliveClosure, DoNothingClosure>
2021     cleaning_task(&is_alive, &do_nothing_cl, num_workers);
2022 #endif
2023     _workers->run_task(&cleaning_task);
2024   }
2025 }
2026 
// STW cleanup entry point: process weak roots, then unload classes.
void ShenandoahHeap::parallel_cleaning(bool full_gc) {
  assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
  stw_process_weak_roots(full_gc);
  stw_unload_classes(full_gc);
}
2032 
// Flip the HAS_FORWARDED bit in the global GC state mask.
void ShenandoahHeap::set_has_forwarded_objects(bool cond) {
  set_gc_state_mask(HAS_FORWARDED, cond);
}
2036 
// Record whether this cycle processes soft/weak/phantom references.
void ShenandoahHeap::set_process_references(bool pr) {
  _process_references.set_cond(pr);
}
2040 
// Record whether this cycle unloads classes.
void ShenandoahHeap::set_unload_classes(bool uc) {
  _unload_classes.set_cond(uc);
}
2044 
// True if the current cycle processes references.
bool ShenandoahHeap::process_references() const {
  return _process_references.is_set();
}
2048 
2049 bool ShenandoahHeap::unload_classes() const {
2050   return _unload_classes.is_set();


2073 }
2074 
// Mark whether a degenerated GC is currently running.
void ShenandoahHeap::set_degenerated_gc_in_progress(bool in_progress) {
  _degenerated_gc_in_progress.set_cond(in_progress);
}
2078 
// Mark whether a full GC is currently running.
void ShenandoahHeap::set_full_gc_in_progress(bool in_progress) {
  _full_gc_in_progress.set_cond(in_progress);
}
2082 
// Mark the move (compaction) sub-phase of full GC; only legal within full GC.
void ShenandoahHeap::set_full_gc_move_in_progress(bool in_progress) {
  assert (is_full_gc_in_progress(), "should be");
  _full_gc_move_in_progress.set_cond(in_progress);
}
2087 
// Flip the UPDATEREFS bit in the global GC state mask.
void ShenandoahHeap::set_update_refs_in_progress(bool in_progress) {
  set_gc_state_mask(UPDATEREFS, in_progress);
}
2091 
// CollectedHeap callback: track a newly installed nmethod as a code root.
void ShenandoahHeap::register_nmethod(nmethod* nm) {
  ShenandoahCodeRoots::add_nmethod(nm);
}
2095 
// CollectedHeap callback: stop tracking an nmethod that is going away.
void ShenandoahHeap::unregister_nmethod(nmethod* nm) {
  ShenandoahCodeRoots::remove_nmethod(nm);
}
2099 
// Pin the region containing o so it is excluded from collection sets
// while pinned (e.g. for JNI critical sections). Returns the same oop.
oop ShenandoahHeap::pin_object(JavaThread* thr, oop o) {
  ShenandoahHeapLocker locker(lock());
  heap_region_containing(o)->make_pinned();
  return o;
}
2105 
// Undo a matching pin_object() call on the region containing o.
void ShenandoahHeap::unpin_object(JavaThread* thr, oop o) {
  ShenandoahHeapLocker locker(lock());
  heap_region_containing(o)->make_unpinned();
}
2110 
// Accessor for the GC timer used by tracing/reporting.
GCTimer* ShenandoahHeap::gc_timer() const {
  return _gc_timer;
}
2114 















2115 #ifdef ASSERT
2116 void ShenandoahHeap::assert_gc_workers(uint nworkers) {
2117   assert(nworkers > 0 && nworkers <= max_workers(), "Sanity");
2118 
2119   if (ShenandoahSafepoint::is_at_shenandoah_safepoint()) {
2120     if (UseDynamicNumberOfGCThreads ||
2121         (FLAG_IS_DEFAULT(ParallelGCThreads) && ForceDynamicNumberOfGCThreads)) {
2122       assert(nworkers <= ParallelGCThreads, "Cannot use more than it has");
2123     } else {
2124       // Use ParallelGCThreads inside safepoints
2125       assert(nworkers == ParallelGCThreads, "Use ParalleGCThreads within safepoints");
2126     }
2127   } else {
2128     if (UseDynamicNumberOfGCThreads ||
2129         (FLAG_IS_DEFAULT(ConcGCThreads) && ForceDynamicNumberOfGCThreads)) {
2130       assert(nworkers <= ConcGCThreads, "Cannot use more than it has");
2131     } else {
2132       // Use ConcGCThreads outside safepoints
2133       assert(nworkers == ConcGCThreads, "Use ConcGCThreads outside safepoints");
2134     }


2210   }
2211 
2212   set_update_refs_in_progress(true);
2213   make_parsable(true);
2214   for (uint i = 0; i < num_regions(); i++) {
2215     ShenandoahHeapRegion* r = get_region(i);
2216     r->set_concurrent_iteration_safe_limit(r->top());
2217   }
2218 
2219   // Reset iterator.
2220   _update_refs_iterator.reset();
2221 
2222   if (ShenandoahPacing) {
2223     pacer()->setup_for_updaterefs();
2224   }
2225 }
2226 
2227 void ShenandoahHeap::op_final_updaterefs() {
2228   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at safepoint");
2229 


2230   // Check if there is left-over work, and finish it
2231   if (_update_refs_iterator.has_next()) {
2232     ShenandoahGCPhase final_work(ShenandoahPhaseTimings::final_update_refs_finish_work);
2233 
2234     // Finish updating references where we left off.
2235     clear_cancelled_gc();
2236     update_heap_references(false);
2237   }
2238 
2239   // Clear cancelled GC, if set. On cancellation path, the block before would handle
2240   // everything. On degenerated paths, cancelled gc would not be set anyway.
2241   if (cancelled_gc()) {
2242     clear_cancelled_gc();
2243   }
2244   assert(!cancelled_gc(), "Should have been done right before");
2245 
2246   if (ShenandoahVerify && !is_degenerated_gc_in_progress()) {
2247     verifier()->verify_roots_no_forwarded_except(ShenandoahRootVerifier::ThreadRoots);
2248   }
2249 
2250   if (is_degenerated_gc_in_progress()) {
2251     concurrent_mark()->update_roots(ShenandoahPhaseTimings::degen_gc_update_roots);
2252   } else {
2253     concurrent_mark()->update_thread_roots(ShenandoahPhaseTimings::final_update_refs_roots);
2254   }
2255 
2256   // Has to be done before cset is clear
2257   if (ShenandoahVerify) {
2258     verifier()->verify_roots_in_to_space();
2259   }
2260 
2261   ShenandoahGCPhase final_update_refs(ShenandoahPhaseTimings::final_update_refs_recycle);
2262 
2263   trash_cset_regions();
2264   set_has_forwarded_objects(false);
2265   set_update_refs_in_progress(false);
2266 
2267   if (ShenandoahVerify) {




1050     _rp(rp) {}
1051 
1052   void work(uint worker_id) {
1053     ShenandoahParallelWorkerSession worker_session(worker_id);
1054     ShenandoahEvacOOMScope oom_evac_scope;
1055     ShenandoahEvacuateUpdateRootsClosure cl;
1056     MarkingCodeBlobClosure blobsCl(&cl, CodeBlobToOopClosure::FixRelocations);
1057     _rp->roots_do(worker_id, &cl);
1058   }
1059 };
1060 
// Evacuate and update all roots at a safepoint, using all active workers.
void ShenandoahHeap::evacuate_and_update_roots() {
#if COMPILER2_OR_JVMCI
  // Derived pointers become temporarily invalid while base oops move;
  // collect them here and fix them up after root evacuation below.
  DerivedPointerTable::clear();
#endif
  assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "Only iterate roots while world is stopped");
  {
    // Include concurrent roots if current cycle can not process those roots concurrently
    ShenandoahRootEvacuator rp(workers()->active_workers(),
                               ShenandoahPhaseTimings::init_evac,
                               !ShenandoahConcurrentRoots::should_do_concurrent_roots(),
                               !ShenandoahConcurrentRoots::should_do_concurrent_class_unloading());
    ShenandoahEvacuateUpdateRootsTask roots_task(&rp);
    workers()->run_task(&roots_task);
  }

#if COMPILER2_OR_JVMCI
  DerivedPointerTable::update_pointers();
#endif
}
1080 
1081 // Returns size in bytes
1082 size_t ShenandoahHeap::unsafe_max_tlab_alloc(Thread *thread) const {
1083   if (ShenandoahElasticTLAB) {
1084     // With Elastic TLABs, return the max allowed size, and let the allocation path
1085     // figure out the safe size for current allocation.
1086     return ShenandoahHeapRegion::max_tlab_size_bytes();
1087   } else {
1088     return MIN2(_free_set->unsafe_peek_free(), ShenandoahHeapRegion::max_tlab_size_bytes());
1089   }
1090 }
1091 


1489         heuristics()->choose_collection_set(_collection_set);
1490 
1491         _free_set->rebuild();
1492       }
1493     }
1494 
1495     // If collection set has candidates, start evacuation.
1496     // Otherwise, bypass the rest of the cycle.
1497     if (!collection_set()->is_empty()) {
1498       ShenandoahGCPhase init_evac(ShenandoahPhaseTimings::init_evac);
1499 
1500       if (ShenandoahVerify) {
1501         verifier()->verify_before_evacuation();
1502       }
1503 
1504       set_evacuation_in_progress(true);
1505       // From here on, we need to update references.
1506       set_has_forwarded_objects(true);
1507 
1508       if (!is_degenerated_gc_in_progress()) {
1509         prepare_concurrent_unloading();
1510         evacuate_and_update_roots();
1511       }
1512 
1513       if (ShenandoahPacing) {
1514         pacer()->setup_for_evac();
1515       }
1516 
1517       if (ShenandoahVerify) {
1518         ShenandoahRootVerifier::RootTypes types = ShenandoahRootVerifier::None;
1519         if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
1520           types = ShenandoahRootVerifier::combine(ShenandoahRootVerifier::JNIHandleRoots, ShenandoahRootVerifier::WeakRoots);
1521           types = ShenandoahRootVerifier::combine(types, ShenandoahRootVerifier::CLDGRoots);



1522         }
1523 
1524         if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
1525           types = ShenandoahRootVerifier::combine(types, ShenandoahRootVerifier::CodeRoots);
1526         }
1527         verifier()->verify_roots_no_forwarded_except(types);
1528         verifier()->verify_during_evacuation();
1529       }
1530     } else {
1531       if (ShenandoahVerify) {
1532         verifier()->verify_after_concmark();
1533       }
1534 
1535       if (VerifyAfterGC) {
1536         Universe::verify();
1537       }
1538     }
1539 
1540   } else {
1541     concurrent_mark()->cancel();
1542     stop_concurrent_marking();
1543 
1544     if (process_references()) {
1545       // Abandon reference processing right away: pre-cleaning must have failed.
1546       ReferenceProcessor *rp = ref_processor();
1547       rp->disable_discovery();


1598 
  void work(uint worker_id) {
    // All evacuations below must honor the OOM-during-evacuation protocol.
    ShenandoahEvacOOMScope oom;
    {
      // jni_roots and weak_roots are OopStorage backed roots, concurrent iteration
      // may race against OopStorage::release() calls.
      ShenandoahEvacUpdateOopStorageRootsClosure cl;
      _vm_roots.oops_do<ShenandoahEvacUpdateOopStorageRootsClosure>(&cl);
      _weak_roots.oops_do<ShenandoahEvacUpdateOopStorageRootsClosure>(&cl);
    }

    {
      // Walk class loader data roots with a strong CLD claim.
      ShenandoahEvacuateUpdateRootsClosure cl;
      CLDToOopClosure clds(&cl, ClassLoaderData::_claim_strong);
      _cld_roots.cld_do(&clds);
    }
  }
1615 };
1616 
1617 void ShenandoahHeap::op_roots() {
1618   if (is_evacuation_in_progress()) {
1619     if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
1620       _unloader.unload();
1621     }
1622 
1623     if (ShenandoahConcurrentRoots::should_do_concurrent_roots()) {
1624       ShenandoahConcurrentRootsEvacUpdateTask task;
1625       workers()->run_task(&task);
1626     }
1627   }
1628 }
1629 
// Concurrent phase: reset marking bitmaps in preparation for the next cycle.
void ShenandoahHeap::op_reset() {
  reset_mark_bitmap();
}
1633 
// Concurrent phase: preclean discovered weak references.
void ShenandoahHeap::op_preclean() {
  concurrent_mark()->preclean_weak_refs();
}
1637 
// Safepoint phase: start a traversal GC collection.
void ShenandoahHeap::op_init_traversal() {
  traversal_gc()->init_traversal_collection();
}
1641 
// Concurrent phase: run the traversal GC collection.
void ShenandoahHeap::op_traversal() {
  traversal_gc()->concurrent_traversal_collection();
}
1645 
1646 void ShenandoahHeap::op_final_traversal() {


1983     ShenandoahGCPhase phase(full_gc ?
1984                             ShenandoahPhaseTimings::full_gc_purge_par :
1985                             ShenandoahPhaseTimings::purge_par);
1986     ShenandoahIsAliveSelector is_alive;
1987     uint num_workers = _workers->active_workers();
1988     ShenandoahClassUnloadingTask unlink_task(is_alive.is_alive_closure(), num_workers, purged_class);
1989     _workers->run_task(&unlink_task);
1990   }
1991 
1992   {
1993     ShenandoahGCPhase phase(full_gc ?
1994                             ShenandoahPhaseTimings::full_gc_purge_cldg :
1995                             ShenandoahPhaseTimings::purge_cldg);
1996     ClassLoaderDataGraph::purge();
1997   }
1998   // Resize and verify metaspace
1999   MetaspaceGC::compute_new_size();
2000   MetaspaceUtils::verify_metrics();
2001 }
2002 
// Weak roots are either pre-evacuated (final mark) or updated (final updaterefs),
// so they should not have forwarded oops.
// However, we do need to "null" dead oops in the roots, if can not be done
// in concurrent cycles.
void ShenandoahHeap::stw_process_weak_roots(bool full_gc) {
  ShenandoahGCPhase root_phase(full_gc ?
                               ShenandoahPhaseTimings::full_gc_purge :
                               ShenandoahPhaseTimings::purge);
  uint num_workers = _workers->active_workers();
  ShenandoahPhaseTimings::Phase timing_phase = full_gc ?
                                               ShenandoahPhaseTimings::full_gc_purge_par :
                                               ShenandoahPhaseTimings::purge_par;
  // Cleanup weak roots
  ShenandoahGCPhase phase(timing_phase);
  ShenandoahIsAliveClosure is_alive;
  // Debug builds additionally assert no weak root is forwarded; product
  // builds just visit the oops, as the weak processor API requires.
#ifdef ASSERT
  ShenandoahAssertNotForwardedClosure verify_cl;
  ShenandoahParallelWeakRootsCleaningTask<ShenandoahIsAliveClosure, ShenandoahAssertNotForwardedClosure>
    cleaning_task(&is_alive, &verify_cl, num_workers);
#else
  ShenandoahParallelWeakRootsCleaningTask<ShenandoahIsAliveClosure, DoNothingClosure>
    cleaning_task(&is_alive, &do_nothing_cl, num_workers);
#endif
  _workers->run_task(&cleaning_task);
}
2028 
// STW cleanup entry point: process weak roots, and unload classes only when
// class unloading is not handled by the concurrent path.
void ShenandoahHeap::parallel_cleaning(bool full_gc) {
  assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
  stw_process_weak_roots(full_gc);
  if (!ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
    stw_unload_classes(full_gc);
  }
}
2036 
// Flip the HAS_FORWARDED bit in the global GC state mask.
void ShenandoahHeap::set_has_forwarded_objects(bool cond) {
  set_gc_state_mask(HAS_FORWARDED, cond);
}
2040 
// Record whether this cycle processes soft/weak/phantom references.
void ShenandoahHeap::set_process_references(bool pr) {
  _process_references.set_cond(pr);
}
2044 
// Record whether this cycle unloads classes.
void ShenandoahHeap::set_unload_classes(bool uc) {
  _unload_classes.set_cond(uc);
}
2048 
// True if the current cycle processes references.
bool ShenandoahHeap::process_references() const {
  return _process_references.is_set();
}
2052 
2053 bool ShenandoahHeap::unload_classes() const {
2054   return _unload_classes.is_set();


2077 }
2078 
// Mark whether a degenerated GC is currently running.
void ShenandoahHeap::set_degenerated_gc_in_progress(bool in_progress) {
  _degenerated_gc_in_progress.set_cond(in_progress);
}
2082 
// Mark whether a full GC is currently running.
void ShenandoahHeap::set_full_gc_in_progress(bool in_progress) {
  _full_gc_in_progress.set_cond(in_progress);
}
2086 
// Mark the move (compaction) sub-phase of full GC; only legal within full GC.
void ShenandoahHeap::set_full_gc_move_in_progress(bool in_progress) {
  assert (is_full_gc_in_progress(), "should be");
  _full_gc_move_in_progress.set_cond(in_progress);
}
2091 
// Flip the UPDATEREFS bit in the global GC state mask.
void ShenandoahHeap::set_update_refs_in_progress(bool in_progress) {
  set_gc_state_mask(UPDATEREFS, in_progress);
}
2095 
// CollectedHeap callback: track a newly installed nmethod as a code root.
void ShenandoahHeap::register_nmethod(nmethod* nm) {
  ShenandoahCodeRoots::register_nmethod(nm);
}
2099 
// CollectedHeap callback: stop tracking an nmethod that is going away.
void ShenandoahHeap::unregister_nmethod(nmethod* nm) {
  ShenandoahCodeRoots::unregister_nmethod(nm);
}
2103 
// CollectedHeap callback: release per-nmethod GC data when it is flushed.
void ShenandoahHeap::flush_nmethod(nmethod* nm) {
  ShenandoahCodeRoots::flush_nmethod(nm);
}
2107 
// Pin the region containing o so it is excluded from collection sets
// while pinned (e.g. for JNI critical sections). Returns the same oop.
oop ShenandoahHeap::pin_object(JavaThread* thr, oop o) {
  ShenandoahHeapLocker locker(lock());
  heap_region_containing(o)->make_pinned();
  return o;
}
2113 
// Undo a matching pin_object() call on the region containing o.
void ShenandoahHeap::unpin_object(JavaThread* thr, oop o) {
  ShenandoahHeapLocker locker(lock());
  heap_region_containing(o)->make_unpinned();
}
2118 
// Accessor for the GC timer used by tracing/reporting.
GCTimer* ShenandoahHeap::gc_timer() const {
  return _gc_timer;
}
2122 
// At a safepoint, prepare code roots and the unloader for the concurrent
// class unloading that follows; no-op if concurrent unloading is disabled.
void ShenandoahHeap::prepare_concurrent_unloading() {
  assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
  if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
    ShenandoahCodeRoots::prepare_concurrent_unloading();
    _unloader.prepare();
  }
}
2130 
// At a safepoint, finish any concurrent class unloading started earlier
// in the cycle; no-op if concurrent unloading is disabled.
void ShenandoahHeap::finish_concurrent_unloading() {
  assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
  if (ShenandoahConcurrentRoots::should_do_concurrent_class_unloading()) {
    _unloader.finish();
  }
}
2137 
2138 #ifdef ASSERT
2139 void ShenandoahHeap::assert_gc_workers(uint nworkers) {
2140   assert(nworkers > 0 && nworkers <= max_workers(), "Sanity");
2141 
2142   if (ShenandoahSafepoint::is_at_shenandoah_safepoint()) {
2143     if (UseDynamicNumberOfGCThreads ||
2144         (FLAG_IS_DEFAULT(ParallelGCThreads) && ForceDynamicNumberOfGCThreads)) {
2145       assert(nworkers <= ParallelGCThreads, "Cannot use more than it has");
2146     } else {
2147       // Use ParallelGCThreads inside safepoints
2148       assert(nworkers == ParallelGCThreads, "Use ParalleGCThreads within safepoints");
2149     }
2150   } else {
2151     if (UseDynamicNumberOfGCThreads ||
2152         (FLAG_IS_DEFAULT(ConcGCThreads) && ForceDynamicNumberOfGCThreads)) {
2153       assert(nworkers <= ConcGCThreads, "Cannot use more than it has");
2154     } else {
2155       // Use ConcGCThreads outside safepoints
2156       assert(nworkers == ConcGCThreads, "Use ConcGCThreads outside safepoints");
2157     }


2233   }
2234 
2235   set_update_refs_in_progress(true);
2236   make_parsable(true);
2237   for (uint i = 0; i < num_regions(); i++) {
2238     ShenandoahHeapRegion* r = get_region(i);
2239     r->set_concurrent_iteration_safe_limit(r->top());
2240   }
2241 
2242   // Reset iterator.
2243   _update_refs_iterator.reset();
2244 
2245   if (ShenandoahPacing) {
2246     pacer()->setup_for_updaterefs();
2247   }
2248 }
2249 
2250 void ShenandoahHeap::op_final_updaterefs() {
2251   assert(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "must be at safepoint");
2252 
2253   finish_concurrent_unloading();
2254 
2255   // Check if there is left-over work, and finish it
2256   if (_update_refs_iterator.has_next()) {
2257     ShenandoahGCPhase final_work(ShenandoahPhaseTimings::final_update_refs_finish_work);
2258 
2259     // Finish updating references where we left off.
2260     clear_cancelled_gc();
2261     update_heap_references(false);
2262   }
2263 
2264   // Clear cancelled GC, if set. On cancellation path, the block before would handle
2265   // everything. On degenerated paths, cancelled gc would not be set anyway.
2266   if (cancelled_gc()) {
2267     clear_cancelled_gc();
2268   }
2269   assert(!cancelled_gc(), "Should have been done right before");
2270 
2271   if (ShenandoahVerify && !is_degenerated_gc_in_progress()) {
2272     verifier()->verify_roots_in_to_space_except(ShenandoahRootVerifier::ThreadRoots);
2273   }
2274 
2275   if (is_degenerated_gc_in_progress()) {
2276     concurrent_mark()->update_roots(ShenandoahPhaseTimings::degen_gc_update_roots);
2277   } else {
2278     concurrent_mark()->update_thread_roots(ShenandoahPhaseTimings::final_update_refs_roots);
2279   }
2280 
2281   // Has to be done before cset is clear
2282   if (ShenandoahVerify) {
2283     verifier()->verify_roots_in_to_space();
2284   }
2285 
2286   ShenandoahGCPhase final_update_refs(ShenandoahPhaseTimings::final_update_refs_recycle);
2287 
2288   trash_cset_regions();
2289   set_has_forwarded_objects(false);
2290   set_update_refs_in_progress(false);
2291 
2292   if (ShenandoahVerify) {


< prev index next >