/*
 * Copyright (c) 2017, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "classfile/classLoaderData.hpp"
#include "gc/shared/barrierSetNMethod.hpp"
#include "gc/shared/gc_globals.hpp"
#include "gc/shared/taskqueue.inline.hpp"
#include "gc/z/zAddress.inline.hpp"
#include "gc/z/zCollectedHeap.hpp"
#include "gc/z/zGlobals.hpp"
#include "gc/z/zGranuleMap.inline.hpp"
#include "gc/z/zHeapIterator.hpp"
#include "gc/z/zLock.inline.hpp"
#include "gc/z/zNMethod.hpp"
#include "gc/z/zOop.inline.hpp"
#include "memory/iterator.inline.hpp"
#include "utilities/bitMap.inline.hpp"

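// Visited-object bitmap covering a single heap granule. One bit per
// minimally-aligned object start; bits are set with an atomic operation
// so that multiple workers can mark objects concurrently.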
class ZHeapIteratorBitMap : public CHeapObj<mtGC> {
private:
  CHeapBitMap _bitmap;

public:
  ZHeapIteratorBitMap(size_t size_in_bits) :
      _bitmap(size_in_bits, mtGC) {}

  bool try_set_bit(size_t index) {
    return _bitmap.par_set_bit(index);
  }
};

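// Per-worker iteration state: the worker's oop queue and obj-array chunk
// queue, plus push/pop/steal operations on them. Statistics timers are
// disabled for the lifetime of the context.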
class ZHeapIteratorContext {
private:
  ZHeapIterator* const           _iter;
  ZHeapIteratorQueue* const      _queue;
  ZHeapIteratorArrayQueue* const _array_queue;
  const uint                     _worker_id;
  ZStatTimerDisable              _timer_disable;

public:
  ZHeapIteratorContext(ZHeapIterator* iter, uint worker_id) :
      _iter(iter),
      _queue(_iter->_queues.queue(worker_id)),
      _array_queue(_iter->_array_queues.queue(worker_id)),
      _worker_id(worker_id) {}

  void mark_and_push(oop obj) const {
    if (_iter->mark_object(obj)) {
      _queue->push(obj);
    }
  }

  void push_array(const ObjArrayTask& array) const {
    _array_queue->push(array);
  }

  bool pop(oop& obj) const {
    return _queue->pop_overflow(obj) || _queue->pop_local(obj);
  }

  bool pop_array(ObjArrayTask& array) const {
    return _array_queue->pop_overflow(array) || _array_queue->pop_local(array);
  }

  bool steal(oop& obj) const {
    return _iter->_queues.steal(_worker_id, obj);
  }

  bool steal_array(ObjArrayTask& array) const {
    return _iter->_array_queues.steal(_worker_id, array);
  }

  bool is_drained() const {
    return _queue->is_empty() && _array_queue->is_empty();
  }
};

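// Closure applied to root oops. Weak roots are loaded with phantom
// semantics and without keep-alive, so that visiting a weak root does
// not itself keep the referenced object alive.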
template <bool Weak>
class ZHeapIteratorRootOopClosure : public OopClosure {
private:
  const ZHeapIteratorContext& _context;

  oop load_oop(oop* p) {
    if (Weak) {
      return NativeAccess<AS_NO_KEEPALIVE | ON_PHANTOM_OOP_REF>::oop_load(p);
    }

    return NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
  }

public:
  ZHeapIteratorRootOopClosure(const ZHeapIteratorContext& context) :
      _context(context) {}

  virtual void do_oop(oop* p) {
    const oop obj = load_oop(p);
    _context.mark_and_push(obj);
  }

  virtual void do_oop(narrowOop* p) {
    ShouldNotReachHere();
  }
};

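// Closure applied to the oop fields of an already-visited object. When
// VisitReferents is set, Reference referents are visited as well; the
// ON_UNKNOWN_OOP_REF decorator lets the access layer determine the
// reference strength of the field being loaded.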
template <bool VisitReferents>
class ZHeapIteratorOopClosure : public OopIterateClosure {
private:
  const ZHeapIteratorContext& _context;
  const oop                   _base;

  oop load_oop(oop* p) {
    assert(ZCollectedHeap::heap()->is_in(p), "Should be in heap");

    if (VisitReferents) {
      return HeapAccess<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>::oop_load_at(_base, _base->field_offset(p));
    }

    return HeapAccess<AS_NO_KEEPALIVE>::oop_load(p);
  }

public:
  ZHeapIteratorOopClosure(const ZHeapIteratorContext& context, oop base) :
      OopIterateClosure(),
      _context(context),
      _base(base) {}

  virtual ReferenceIterationMode reference_iteration_mode() {
    return VisitReferents ? DO_FIELDS : DO_FIELDS_EXCEPT_REFERENT;
  }

  virtual void do_oop(oop* p) {
    const oop obj = load_oop(p);
    _context.mark_and_push(obj);
  }

  virtual void do_oop(narrowOop* p) {
    ShouldNotReachHere();
  }

  virtual bool do_metadata() {
    return true;
  }

  virtual void do_klass(Klass* k) {
    ClassLoaderData* const cld = k->class_loader_data();
    ZHeapIteratorOopClosure::do_cld(cld);
  }

  virtual void do_cld(ClassLoaderData* cld) {
    class NativeAccessClosure : public OopClosure {
    private:
      const ZHeapIteratorContext& _context;

    public:
      explicit NativeAccessClosure(const ZHeapIteratorContext& context) :
          _context(context) {}

      virtual void do_oop(oop* p) {
        assert(!ZCollectedHeap::heap()->is_in(p), "Should not be in heap");
        const oop obj = NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
        _context.mark_and_push(obj);
      }

      virtual void do_oop(narrowOop* p) {
        ShouldNotReachHere();
      }
    };

    NativeAccessClosure cl(_context);
    cld->oops_do(&cl, ClassLoaderData::_claim_other);
  }

  // Don't follow loom stack metadata; it's already followed in other ways through CLDs
  virtual void do_nmethod(nmethod* nm) {}
  virtual void do_method(Method* m) {}
};

ZHeapIterator::ZHeapIterator(uint nworkers, bool visit_weaks) :
    _visit_weaks(visit_weaks),
    _timer_disable(),
    _bitmaps(ZAddressOffsetMax),
    _bitmaps_lock(),
    _queues(nworkers),
    _array_queues(nworkers),
    _roots(ClassLoaderData::_claim_other),
    _weak_roots(),
    _terminator(nworkers, &_queues) {

  // Create queues
  for (uint i = 0; i < _queues.size(); i++) {
    ZHeapIteratorQueue* const queue = new ZHeapIteratorQueue();
    _queues.register_queue(i, queue);
  }

  // Create array queues
  for (uint i = 0; i < _array_queues.size(); i++) {
    ZHeapIteratorArrayQueue* const array_queue = new ZHeapIteratorArrayQueue();
    _array_queues.register_queue(i, array_queue);
  }
}

ZHeapIterator::~ZHeapIterator() {
  // Destroy bitmaps
  ZHeapIteratorBitMapsIterator iter(&_bitmaps);
  for (ZHeapIteratorBitMap* bitmap; iter.next(&bitmap);) {
    delete bitmap;
  }

  // Destroy array queues
  for (uint i = 0; i < _array_queues.size(); i++) {
    delete _array_queues.queue(i);
  }

  // Destroy queues
  for (uint i = 0; i < _queues.size(); i++) {
    delete _queues.queue(i);
  }
}

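// Each granule is covered by one bitmap, with one bit for every
// minimally-aligned address at which an object could start within
// the granule.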
static size_t object_index_max() {
  return ZGranuleSize >> ZObjectAlignmentSmallShift;
}

static size_t object_index(oop obj) {
  const uintptr_t addr = ZOop::to_address(obj);
  const uintptr_t offset = ZAddress::offset(addr);
  const uintptr_t mask = ZGranuleSize - 1;
  return (offset & mask) >> ZObjectAlignmentSmallShift;
}

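// Returns the bitmap covering the granule that contains obj. Bitmaps are
// installed lazily, using double-checked locking so that the common-case
// lookup is lock-free.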
ZHeapIteratorBitMap* ZHeapIterator::object_bitmap(oop obj) {
  const uintptr_t offset = ZAddress::offset(ZOop::to_address(obj));
  ZHeapIteratorBitMap* bitmap = _bitmaps.get_acquire(offset);
  if (bitmap == NULL) {
    ZLocker<ZLock> locker(&_bitmaps_lock);
    bitmap = _bitmaps.get(offset);
    if (bitmap == NULL) {
      // Install new bitmap
      bitmap = new ZHeapIteratorBitMap(object_index_max());
      _bitmaps.release_put(offset, bitmap);
    }
  }

  return bitmap;
}

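// Marks obj as visited. Returns true on the first visit and false if the
// object was already marked (or is NULL).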
bool ZHeapIterator::mark_object(oop obj) {
  if (obj == NULL) {
    return false;
  }

  ZHeapIteratorBitMap* const bitmap = object_bitmap(obj);
  const size_t index = object_index(obj);
  return bitmap->try_set_bit(index);
}

typedef ClaimingCLDToOopClosure<ClassLoaderData::_claim_other> ZHeapIteratorCLDClosure;

class ZHeapIteratorNMethodClosure : public NMethodClosure {
private:
  OopClosure* const        _cl;
  BarrierSetNMethod* const _bs_nm;

public:
  ZHeapIteratorNMethodClosure(OopClosure* cl) :
      _cl(cl),
      _bs_nm(BarrierSet::barrier_set()->barrier_set_nmethod()) {}

  virtual void do_nmethod(nmethod* nm) {
    // If ClassUnloading is turned off, all nmethods are considered strong,
    // not only those on the call stacks. The heap iteration might happen
    // before the concurrent processing of the code cache, so make sure that
    // all nmethods have been processed before visiting their oops.
    _bs_nm->nmethod_entry_barrier(nm);

    ZNMethod::nmethod_oops_do(nm, _cl);
  }
};

class ZHeapIteratorThreadClosure : public ThreadClosure {
private:
  OopClosure* const        _cl;
  CodeBlobToNMethodClosure _cb_cl;

public:
  ZHeapIteratorThreadClosure(OopClosure* cl, NMethodClosure* nm_cl) :
      _cl(cl),
      _cb_cl(nm_cl) {}

  void do_thread(Thread* thread) {
    thread->oops_do(_cl, &_cb_cl);
  }
};

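// Apply all strong root iterators, pushing every discovered root object
// onto the worker's queue for later following.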
void ZHeapIterator::push_strong_roots(const ZHeapIteratorContext& context) {
  ZHeapIteratorRootOopClosure<false /* Weak */> cl(context);
  ZHeapIteratorCLDClosure cld_cl(&cl);
  ZHeapIteratorNMethodClosure nm_cl(&cl);
  ZHeapIteratorThreadClosure thread_cl(&cl, &nm_cl);

  _roots.apply(&cl,
               &cld_cl,
               &thread_cl,
               &nm_cl);
}

void ZHeapIterator::push_weak_roots(const ZHeapIteratorContext& context) {
  ZHeapIteratorRootOopClosure<true  /* Weak */> cl(context);
  _weak_roots.apply(&cl);
}

template <bool VisitWeaks>
void ZHeapIterator::push_roots(const ZHeapIteratorContext& context) {
  push_strong_roots(context);
  if (VisitWeaks) {
    push_weak_roots(context);
  }
}

template <bool VisitReferents>
void ZHeapIterator::follow_object(const ZHeapIteratorContext& context, oop obj) {
  ZHeapIteratorOopClosure<VisitReferents> cl(context, obj);
  obj->oop_iterate(&cl);
}

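// Object arrays are followed in chunks: the klass is followed immediately,
// while the elements are pushed as ObjArrayTask chunks so that large arrays
// can be processed, and stolen, piecewise.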
void ZHeapIterator::follow_array(const ZHeapIteratorContext& context, oop obj) {
  // Follow klass
  ZHeapIteratorOopClosure<false /* VisitReferents */> cl(context, obj);
  cl.do_klass(obj->klass());

  // Push array chunk
  context.push_array(ObjArrayTask(obj, 0 /* index */));
}

void ZHeapIterator::follow_array_chunk(const ZHeapIteratorContext& context, const ObjArrayTask& array) {
  const objArrayOop obj = objArrayOop(array.obj());
  const int length = obj->length();
  const int start = array.index();
  const int stride = MIN2<int>(length - start, ObjArrayMarkingStride);
  const int end = start + stride;

  // Push remaining array chunk first
  if (end < length) {
    context.push_array(ObjArrayTask(obj, end));
  }

  // Follow array chunk
  ZHeapIteratorOopClosure<false /* VisitReferents */> cl(context, obj);
  obj->oop_iterate_range(&cl, start, end);
}

template <bool VisitWeaks>
void ZHeapIterator::visit_and_follow(const ZHeapIteratorContext& context, ObjectClosure* cl, oop obj) {
  // Visit
  cl->do_object(obj);

  // Follow
  if (obj->is_objArray()) {
    follow_array(context, obj);
  } else {
    follow_object<VisitWeaks>(context, obj);
  }
}

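// Drain the local queues. Array chunks are processed one at a time, so
// that the oop queue, which following a chunk may refill, is re-drained
// in between chunks.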
template <bool VisitWeaks>
void ZHeapIterator::drain(const ZHeapIteratorContext& context, ObjectClosure* cl) {
  ObjArrayTask array;
  oop obj;

  do {
    while (context.pop(obj)) {
      visit_and_follow<VisitWeaks>(context, cl, obj);
    }

    if (context.pop_array(array)) {
      follow_array_chunk(context, array);
    }
  } while (!context.is_drained());
}

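// Try to steal work from other workers once the local queues are empty.
// Array chunks are tried first, then plain oops; any stolen work is
// processed before returning to the drain loop.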
template <bool VisitWeaks>
void ZHeapIterator::steal(const ZHeapIteratorContext& context, ObjectClosure* cl) {
  ObjArrayTask array;
  oop obj;

  if (context.steal_array(array)) {
    follow_array_chunk(context, array);
  } else if (context.steal(obj)) {
    visit_and_follow<VisitWeaks>(context, cl, obj);
  }
}

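// Alternate between draining and stealing until the local queues are empty
// and all workers have agreed to terminate.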
template <bool VisitWeaks>
void ZHeapIterator::drain_and_steal(const ZHeapIteratorContext& context, ObjectClosure* cl) {
  do {
    drain<VisitWeaks>(context, cl);
    steal<VisitWeaks>(context, cl);
  } while (!context.is_drained() || !_terminator.offer_termination());
}

template <bool VisitWeaks>
void ZHeapIterator::object_iterate_inner(const ZHeapIteratorContext& context, ObjectClosure* object_cl) {
  push_roots<VisitWeaks>(context);
  drain_and_steal<VisitWeaks>(context, object_cl);
}

void ZHeapIterator::object_iterate(ObjectClosure* cl, uint worker_id) {
  ZHeapIteratorContext context(this, worker_id);

  if (_visit_weaks) {
    object_iterate_inner<true /* VisitWeaks */>(context, cl);
  } else {
    object_iterate_inner<false /* VisitWeaks */>(context, cl);
  }
}