/*
 * Copyright (c) 2019, 2020, Red Hat, Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
#ifndef SHARE_GC_SHENANDOAH_SHENANDOAHCLOSURES_INLINE_HPP
#define SHARE_GC_SHENANDOAH_SHENANDOAHCLOSURES_INLINE_HPP

#include "gc/shenandoah/shenandoahClosures.hpp"

#include "gc/shared/barrierSetNMethod.hpp"
#include "gc/shenandoah/shenandoahAsserts.hpp"
#include "gc/shenandoah/shenandoahBarrierSet.hpp"
#include "gc/shenandoah/shenandoahEvacOOMHandler.inline.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahMark.inline.hpp"
#include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
#include "gc/shenandoah/shenandoahNMethod.inline.hpp"
#include "gc/shenandoah/shenandoahReferenceProcessor.hpp"
#include "gc/shenandoah/shenandoahTaskqueue.inline.hpp"
#include "memory/iterator.inline.hpp"
#include "oops/compressedOops.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/javaThread.hpp"

//
// ========= Super
//

ShenandoahSuperClosure::ShenandoahSuperClosure() :
  MetadataVisitingOopIterateClosure(), _heap(ShenandoahHeap::heap()) {}

ShenandoahSuperClosure::ShenandoahSuperClosure(ShenandoahReferenceProcessor* rp) :
  MetadataVisitingOopIterateClosure(rp), _heap(ShenandoahHeap::heap()) {}

void ShenandoahSuperClosure::do_nmethod(nmethod* nm) {
  nm->run_nmethod_entry_barrier();
}

//
// ========= Marking
//

ShenandoahMarkRefsSuperClosure::ShenandoahMarkRefsSuperClosure(ShenandoahObjToScanQueue* q, ShenandoahReferenceProcessor* rp) :
  ShenandoahSuperClosure(rp),
  _queue(q),
  _mark_context(ShenandoahHeap::heap()->marking_context()),
  _weak(false) {}

template<class T, ShenandoahGenerationType GENERATION>
inline void ShenandoahMarkRefsSuperClosure::work(T* p) {
  ShenandoahMark::mark_through_ref<T, GENERATION>(p, _queue, _mark_context, _weak);
}

ShenandoahForwardedIsAliveClosure::ShenandoahForwardedIsAliveClosure() :
  _mark_context(ShenandoahHeap::heap()->marking_context()) {}

bool ShenandoahForwardedIsAliveClosure::do_object_b(oop obj) {
  if (CompressedOops::is_null(obj)) {
    return false;
  }
  obj = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
  shenandoah_assert_not_forwarded_if(nullptr, obj, ShenandoahHeap::heap()->is_concurrent_mark_in_progress());
  return _mark_context->is_marked(obj);
}

ShenandoahIsAliveClosure::ShenandoahIsAliveClosure() :
  _mark_context(ShenandoahHeap::heap()->marking_context()) {}

bool ShenandoahIsAliveClosure::do_object_b(oop obj) {
  if (CompressedOops::is_null(obj)) {
    return false;
  }
  shenandoah_assert_not_forwarded(nullptr, obj);
  return _mark_context->is_marked(obj);
}

BoolObjectClosure* ShenandoahIsAliveSelector::is_alive_closure() {
  return ShenandoahHeap::heap()->has_forwarded_objects() ?
         reinterpret_cast<BoolObjectClosure*>(&_fwd_alive_cl) :
         reinterpret_cast<BoolObjectClosure*>(&_alive_cl);
}

ShenandoahKeepAliveClosure::ShenandoahKeepAliveClosure() :
  _bs(ShenandoahBarrierSet::barrier_set()) {}

template <typename T>
void ShenandoahKeepAliveClosure::do_oop_work(T* p) {
  assert(ShenandoahHeap::heap()->is_concurrent_mark_in_progress(), "Only for concurrent marking phase");
  assert(!ShenandoahHeap::heap()->has_forwarded_objects(), "Not expected");

  T o = RawAccess<>::oop_load(p);
  if (!CompressedOops::is_null(o)) {
    oop obj = CompressedOops::decode_not_null(o);
    _bs->enqueue(obj);
  }
}


//
// ========= Evacuating + Roots
//

template <bool CONCURRENT, bool STABLE_THREAD>
void ShenandoahEvacuateUpdateRootClosureBase<CONCURRENT, STABLE_THREAD>::do_oop(oop* p) {
  if (CONCURRENT) {
    ShenandoahEvacOOMScope scope;
    do_oop_work(p);
  } else {
    do_oop_work(p);
  }
}

template <bool CONCURRENT, bool STABLE_THREAD>
void ShenandoahEvacuateUpdateRootClosureBase<CONCURRENT, STABLE_THREAD>::do_oop(narrowOop* p) {
  if (CONCURRENT) {
    ShenandoahEvacOOMScope scope;
    do_oop_work(p);
  } else {
    do_oop_work(p);
  }
}

template <bool CONCURRENT, bool STABLE_THREAD>
template <class T>
void ShenandoahEvacuateUpdateRootClosureBase<CONCURRENT, STABLE_THREAD>::do_oop_work(T* p) {
  assert(_heap->is_concurrent_weak_root_in_progress() ||
         _heap->is_concurrent_strong_root_in_progress(),
         "Only do this in root processing phase");

  T o = RawAccess<>::oop_load(p);
  if (!CompressedOops::is_null(o)) {
    oop obj = CompressedOops::decode_not_null(o);
    if (_heap->in_collection_set(obj)) {
      assert(_heap->is_evacuation_in_progress(), "Only do this when evacuation is in progress");
      shenandoah_assert_marked(p, obj);
      oop resolved = ShenandoahBarrierSet::resolve_forwarded_not_null(obj);
      if (resolved == obj) {
        Thread* thr = STABLE_THREAD ? _thread : Thread::current();
        assert(thr == Thread::current(), "Wrong thread");

        resolved = _heap->evacuate_object(obj, thr);
      }
      if (CONCURRENT) {
        ShenandoahHeap::atomic_update_oop(resolved, p, o);
      } else {
        RawAccess<IS_NOT_NULL | MO_UNORDERED>::oop_store(p, resolved);
      }
    }
  }
}

template <bool CONCURRENT, typename IsAlive, typename KeepAlive>
ShenandoahCleanUpdateWeakOopsClosure<CONCURRENT, IsAlive, KeepAlive>::ShenandoahCleanUpdateWeakOopsClosure(IsAlive* is_alive, KeepAlive* keep_alive) :
  _is_alive(is_alive), _keep_alive(keep_alive) {
  if (!CONCURRENT) {
    assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
  }
}

template <bool CONCURRENT, typename IsAlive, typename KeepAlive>
void ShenandoahCleanUpdateWeakOopsClosure<CONCURRENT, IsAlive, KeepAlive>::do_oop(oop* p) {
  oop obj = RawAccess<>::oop_load(p);
  if (!CompressedOops::is_null(obj)) {
    if (_is_alive->do_object_b(obj)) {
      _keep_alive->do_oop(p);
    } else {
      if (CONCURRENT) {
        ShenandoahHeap::atomic_clear_oop(p, obj);
      } else {
        RawAccess<IS_NOT_NULL>::oop_store(p, oop());
      }
    }
  }
}

template <bool CONCURRENT, typename IsAlive, typename KeepAlive>
void ShenandoahCleanUpdateWeakOopsClosure<CONCURRENT, IsAlive, KeepAlive>::do_oop(narrowOop* p) {
  ShouldNotReachHere();
}

ShenandoahNMethodAndDisarmClosure::ShenandoahNMethodAndDisarmClosure(OopClosure* cl) :
  NMethodToOopClosure(cl, true /* fix_relocations */),
  _bs(BarrierSet::barrier_set()->barrier_set_nmethod()) {
}

void ShenandoahNMethodAndDisarmClosure::do_nmethod(nmethod* nm) {
  assert(nm != nullptr, "Sanity");
  assert(!ShenandoahNMethod::gc_data(nm)->is_unregistered(), "Should not be here");
  NMethodToOopClosure::do_nmethod(nm);
  _bs->disarm(nm);
}


//
// ========= Update References
//

template <ShenandoahGenerationType GENERATION>
ShenandoahMarkUpdateRefsClosure<GENERATION>::ShenandoahMarkUpdateRefsClosure(ShenandoahObjToScanQueue* q, ShenandoahReferenceProcessor* rp) :
  ShenandoahMarkRefsSuperClosure(q, rp) {
  assert(_heap->is_stw_gc_in_progress(), "Can only be used for STW GC");
}

template<ShenandoahGenerationType GENERATION>
template<class T>
inline void ShenandoahMarkUpdateRefsClosure<GENERATION>::work(T* p) {
  // Update the location
  _heap->non_conc_update_with_forwarded(p);

  // ...then do the usual thing
  ShenandoahMarkRefsSuperClosure::work<T, GENERATION>(p);
}

template<class T>
inline void ShenandoahNonConcUpdateRefsClosure::work(T* p) {
  _heap->non_conc_update_with_forwarded(p);
}

template<class T>
inline void ShenandoahConcUpdateRefsClosure::work(T* p) {
  _heap->conc_update_with_forwarded(p);
}


//
// ========= Utilities
//

#ifdef ASSERT
template <class T>
void ShenandoahAssertNotForwardedClosure::do_oop_work(T* p) {
  T o = RawAccess<>::oop_load(p);
  if (!CompressedOops::is_null(o)) {
    oop obj = CompressedOops::decode_not_null(o);
    shenandoah_assert_not_forwarded(p, obj);
  }
}

void ShenandoahAssertNotForwardedClosure::do_oop(narrowOop* p) { do_oop_work(p); }
void ShenandoahAssertNotForwardedClosure::do_oop(oop* p)       { do_oop_work(p); }
#endif

#endif // SHARE_GC_SHENANDOAH_SHENANDOAHCLOSURES_INLINE_HPP