1 /*
  2  * Copyright (c) 2017, 2021, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  */
 23 
 24 #include "precompiled.hpp"
 25 #include "classfile/classLoaderData.hpp"
 26 #include "gc/shared/barrierSetNMethod.hpp"
 27 #include "gc/shared/gc_globals.hpp"
 28 #include "gc/shared/taskqueue.inline.hpp"
 29 #include "gc/z/zAddress.inline.hpp"
 30 #include "gc/z/zCollectedHeap.hpp"
 31 #include "gc/z/zGlobals.hpp"
 32 #include "gc/z/zGranuleMap.inline.hpp"
 33 #include "gc/z/zHeapIterator.hpp"
 34 #include "gc/z/zLock.inline.hpp"
 35 #include "gc/z/zNMethod.hpp"
 36 #include "gc/z/zOop.inline.hpp"
 37 #include "memory/iterator.inline.hpp"
 38 #include "utilities/bitMap.inline.hpp"
 39 
 40 class ZHeapIteratorBitMap : public CHeapObj<mtGC> {
 41 private:
 42   CHeapBitMap _bitmap;
 43 
 44 public:
 45   ZHeapIteratorBitMap(size_t size_in_bits) :
 46       _bitmap(size_in_bits, mtGC) {}
 47 
 48   bool try_set_bit(size_t index) {
 49     return _bitmap.par_set_bit(index);
 50   }
 51 };
 52 
 53 class ZHeapIteratorContext {
 54 private:
 55   ZHeapIterator* const           _iter;
 56   ZHeapIteratorQueue* const      _queue;
 57   ZHeapIteratorArrayQueue* const _array_queue;
 58   const uint                     _worker_id;
 59   ZStatTimerDisable              _timer_disable;
 60 
 61 public:
 62   ZHeapIteratorContext(ZHeapIterator* iter, uint worker_id) :
 63       _iter(iter),
 64       _queue(_iter->_queues.queue(worker_id)),
 65       _array_queue(_iter->_array_queues.queue(worker_id)),
 66       _worker_id(worker_id) {}
 67 
 68   void mark_and_push(oop obj) const {
 69     if (_iter->mark_object(obj)) {
 70       _queue->push(obj);
 71     }
 72   }
 73 
 74   void push_array(const ObjArrayTask& array) const {
 75     _array_queue->push(array);
 76   }
 77 
 78   bool pop(oop& obj) const {
 79     return _queue->pop_overflow(obj) || _queue->pop_local(obj);
 80   }
 81 
 82   bool pop_array(ObjArrayTask& array) const {
 83     return _array_queue->pop_overflow(array) || _array_queue->pop_local(array);
 84   }
 85 
 86   bool steal(oop& obj) const {
 87     return _iter->_queues.steal(_worker_id, obj);
 88   }
 89 
 90   bool steal_array(ObjArrayTask& array) const {
 91     return _iter->_array_queues.steal(_worker_id, array);
 92   }
 93 
 94   bool is_drained() const {
 95     return _queue->is_empty() && _array_queue->is_empty();
 96   }
 97 };
 98 
 99 template <bool Weak>
100 class ZHeapIteratorRootOopClosure : public OopClosure {
101 private:
102   const ZHeapIteratorContext& _context;
103 
104   oop load_oop(oop* p) {
105     if (Weak) {
106       return NativeAccess<AS_NO_KEEPALIVE | ON_PHANTOM_OOP_REF>::oop_load(p);
107     }
108 
109     return NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
110   }
111 
112 public:
113   ZHeapIteratorRootOopClosure(const ZHeapIteratorContext& context) :
114       _context(context) {}
115 
116   virtual void do_oop(oop* p) {
117     const oop obj = load_oop(p);
118     _context.mark_and_push(obj);
119   }
120 
121   virtual void do_oop(narrowOop* p) {
122     ShouldNotReachHere();
123   }
124 };
125 
126 template <bool VisitReferents>
127 class ZHeapIteratorOopClosure : public OopIterateClosure {
128 private:
129   const ZHeapIteratorContext& _context;
130   const oop                   _base;
131 
132   oop load_oop(oop* p) {
133     assert(ZCollectedHeap::heap()->is_in(p), "Should be in heap");
134 
135     if (VisitReferents) {
136       return HeapAccess<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>::oop_load_at(_base, _base->field_offset(p));
137     }
138 
139     return HeapAccess<AS_NO_KEEPALIVE>::oop_load(p);
140   }
141 
142 public:
143   ZHeapIteratorOopClosure(const ZHeapIteratorContext& context, oop base) :
144       OopIterateClosure(),
145       _context(context),
146       _base(base) {}
147 
148   virtual ReferenceIterationMode reference_iteration_mode() {
149     return VisitReferents ? DO_FIELDS : DO_FIELDS_EXCEPT_REFERENT;
150   }
151 
152   virtual void do_oop(oop* p) {
153     const oop obj = load_oop(p);
154     _context.mark_and_push(obj);
155   }
156 
157   virtual void do_oop(narrowOop* p) {
158     ShouldNotReachHere();
159   }
160 
161   virtual bool do_metadata() {
162     return true;
163   }
164 
165   virtual void do_klass(Klass* k) {
166     ClassLoaderData* const cld = k->class_loader_data();
167     ZHeapIteratorOopClosure::do_cld(cld);
168   }
169 
170   virtual void do_cld(ClassLoaderData* cld) {
171     class NativeAccessClosure : public OopClosure {
172     private:
173       const ZHeapIteratorContext& _context;
174 
175     public:
176       explicit NativeAccessClosure(const ZHeapIteratorContext& context) :
177           _context(context) {}
178 
179       virtual void do_oop(oop* p) {
180         assert(!ZCollectedHeap::heap()->is_in(p), "Should not be in heap");
181         const oop obj = NativeAccess<AS_NO_KEEPALIVE>::oop_load(p);
182         _context.mark_and_push(obj);
183       }
184 
185       virtual void do_oop(narrowOop* p) {
186         ShouldNotReachHere();
187       }
188     };
189 
190     NativeAccessClosure cl(_context);
191     cld->oops_do(&cl, ClassLoaderData::_claim_other);
192   }
193 };
194 
195 ZHeapIterator::ZHeapIterator(uint nworkers, bool visit_weaks) :
196     _visit_weaks(visit_weaks),
197     _timer_disable(),
198     _bitmaps(ZAddressOffsetMax),
199     _bitmaps_lock(),
200     _queues(nworkers),
201     _array_queues(nworkers),
202     _roots(ClassLoaderData::_claim_other),
203     _weak_roots(),
204     _terminator(nworkers, &_queues) {
205 
206   // Create queues
207   for (uint i = 0; i < _queues.size(); i++) {
208     ZHeapIteratorQueue* const queue = new ZHeapIteratorQueue();
209     _queues.register_queue(i, queue);
210   }
211 
212   // Create array queues
213   for (uint i = 0; i < _array_queues.size(); i++) {
214     ZHeapIteratorArrayQueue* const array_queue = new ZHeapIteratorArrayQueue();
215     _array_queues.register_queue(i, array_queue);
216   }
217 }
218 
219 ZHeapIterator::~ZHeapIterator() {
220   // Destroy bitmaps
221   ZHeapIteratorBitMapsIterator iter(&_bitmaps);
222   for (ZHeapIteratorBitMap* bitmap; iter.next(&bitmap);) {
223     delete bitmap;
224   }
225 
226   // Destroy array queues
227   for (uint i = 0; i < _array_queues.size(); i++) {
228     delete _array_queues.queue(i);
229   }
230 
231   // Destroy queues
232   for (uint i = 0; i < _queues.size(); i++) {
233     delete _queues.queue(i);
234   }
235 }
236 
237 static size_t object_index_max() {
238   return ZGranuleSize >> ZObjectAlignmentSmallShift;
239 }
240 
241 static size_t object_index(oop obj) {
242   const uintptr_t addr = ZOop::to_address(obj);
243   const uintptr_t offset = ZAddress::offset(addr);
244   const uintptr_t mask = ZGranuleSize - 1;
245   return (offset & mask) >> ZObjectAlignmentSmallShift;
246 }
247 
248 ZHeapIteratorBitMap* ZHeapIterator::object_bitmap(oop obj) {
249   const uintptr_t offset = ZAddress::offset(ZOop::to_address(obj));
250   ZHeapIteratorBitMap* bitmap = _bitmaps.get_acquire(offset);
251   if (bitmap == NULL) {
252     ZLocker<ZLock> locker(&_bitmaps_lock);
253     bitmap = _bitmaps.get(offset);
254     if (bitmap == NULL) {
255       // Install new bitmap
256       bitmap = new ZHeapIteratorBitMap(object_index_max());
257       _bitmaps.release_put(offset, bitmap);
258     }
259   }
260 
261   return bitmap;
262 }
263 
264 bool ZHeapIterator::mark_object(oop obj) {
265   if (obj == NULL) {
266     return false;
267   }
268 
269   ZHeapIteratorBitMap* const bitmap = object_bitmap(obj);
270   const size_t index = object_index(obj);
271   return bitmap->try_set_bit(index);
272 }
273 
// CLD closure that claims CLDs with the _claim_other claim (the claim
// used throughout this heap iteration) and applies an OopClosure to
// each newly claimed CLD's oops.
typedef ClaimingCLDToOopClosure<ClassLoaderData::_claim_other> ZHeapIteratorCLDCLosure;
275 
// Applies an OopClosure to the oops of each nmethod, after first
// running the nmethod entry barrier on it so its oops are safe to read.
class ZHeapIteratorNMethodClosure : public NMethodClosure {
private:
  OopClosure* const        _cl;     // Applied to each oop in the nmethod
  BarrierSetNMethod* const _bs_nm;  // Used to apply the nmethod entry barrier

public:
  ZHeapIteratorNMethodClosure(OopClosure* cl) :
      _cl(cl),
      _bs_nm(BarrierSet::barrier_set()->barrier_set_nmethod()) {}

  virtual void do_nmethod(nmethod* nm) {
    // If ClassUnloading is turned off, all nmethods are considered strong,
    // not only those on the call stacks. The heap iteration might happen
    // before the concurrent processing of the code cache, make sure that
    // all nmethods have been processed before visiting the oops.
    _bs_nm->nmethod_entry_barrier(nm);

    ZNMethod::nmethod_oops_do(nm, _cl);
  }
};
296 
297 class ZHeapIteratorThreadClosure : public ThreadClosure {
298 private:
299   OopClosure* const        _cl;
300   CodeBlobToNMethodClosure _cb_cl;
301 
302 public:
303   ZHeapIteratorThreadClosure(OopClosure* cl, NMethodClosure* nm_cl) :
304       _cl(cl),
305       _cb_cl(nm_cl) {}
306 
307   void do_thread(Thread* thread) {
308     thread->oops_do(_cl, &_cb_cl);
309   }
310 };
311 
312 void ZHeapIterator::push_strong_roots(const ZHeapIteratorContext& context) {
313   ZHeapIteratorRootOopClosure<false /* Weak */> cl(context);
314   ZHeapIteratorCLDCLosure cld_cl(&cl);
315   ZHeapIteratorNMethodClosure nm_cl(&cl);
316   ZHeapIteratorThreadClosure thread_cl(&cl, &nm_cl);
317 
318   _roots.apply(&cl,
319                &cld_cl,
320                &thread_cl,
321                &nm_cl);
322 }
323 
324 void ZHeapIterator::push_weak_roots(const ZHeapIteratorContext& context) {
325   ZHeapIteratorRootOopClosure<true  /* Weak */> cl(context);
326   _weak_roots.apply(&cl);
327 }
328 
329 template <bool VisitWeaks>
330 void ZHeapIterator::push_roots(const ZHeapIteratorContext& context) {
331   push_strong_roots(context);
332   if (VisitWeaks) {
333     push_weak_roots(context);
334   }
335 }
336 
337 template <bool VisitReferents>
338 void ZHeapIterator::follow_object(const ZHeapIteratorContext& context, oop obj) {
339   ZHeapIteratorOopClosure<VisitReferents> cl(context, obj);
340   obj->oop_iterate(&cl);
341 }
342 
343 void ZHeapIterator::follow_array(const ZHeapIteratorContext& context, oop obj) {
344   // Follow klass
345   ZHeapIteratorOopClosure<false /* VisitReferents */> cl(context, obj);
346   cl.do_klass(obj->klass());
347 
348   // Push array chunk
349   context.push_array(ObjArrayTask(obj, 0 /* index */));
350 }
351 
352 void ZHeapIterator::follow_array_chunk(const ZHeapIteratorContext& context, const ObjArrayTask& array) {
353   const objArrayOop obj = objArrayOop(array.obj());
354   const int length = obj->length();
355   const int start = array.index();
356   const int stride = MIN2<int>(length - start, ObjArrayMarkingStride);
357   const int end = start + stride;
358 
359   // Push remaining array chunk first
360   if (end < length) {
361     context.push_array(ObjArrayTask(obj, end));
362   }
363 
364   // Follow array chunk
365   ZHeapIteratorOopClosure<false /* VisitReferents */> cl(context, obj);
366   obj->oop_iterate_range(&cl, start, end);
367 }
368 
369 template <bool VisitWeaks>
370 void ZHeapIterator::visit_and_follow(const ZHeapIteratorContext& context, ObjectClosure* cl, oop obj) {
371   // Visit
372   cl->do_object(obj);
373 
374   // Follow
375   if (obj->is_objArray()) {
376     follow_array(context, obj);
377   } else {
378     follow_object<VisitWeaks>(context, obj);
379   }
380 }
381 
382 template <bool VisitWeaks>
383 void ZHeapIterator::drain(const ZHeapIteratorContext& context, ObjectClosure* cl) {
384   ObjArrayTask array;
385   oop obj;
386 
387   do {
388     while (context.pop(obj)) {
389       visit_and_follow<VisitWeaks>(context, cl, obj);
390     }
391 
392     if (context.pop_array(array)) {
393       follow_array_chunk(context, array);
394     }
395   } while (!context.is_drained());
396 }
397 
398 template <bool VisitWeaks>
399 void ZHeapIterator::steal(const ZHeapIteratorContext& context, ObjectClosure* cl) {
400   ObjArrayTask array;
401   oop obj;
402 
403   if (context.steal_array(array)) {
404     follow_array_chunk(context, array);
405   } else if (context.steal(obj)) {
406     visit_and_follow<VisitWeaks>(context, cl, obj);
407   }
408 }
409 
// Alternate between draining the local queues and stealing from other
// workers until this worker is drained and all workers agree to
// terminate. Note the short-circuit in the loop condition:
// offer_termination() is only consulted once the local queues are
// empty, and if it declines (more work appeared), we go around again.
template <bool VisitWeaks>
void ZHeapIterator::drain_and_steal(const ZHeapIteratorContext& context, ObjectClosure* cl) {
  do {
    drain<VisitWeaks>(context, cl);
    steal<VisitWeaks>(context, cl);
  } while (!context.is_drained() || !_terminator.offer_termination());
}
417 
418 template <bool VisitWeaks>
419 void ZHeapIterator::object_iterate_inner(const ZHeapIteratorContext& context, ObjectClosure* object_cl) {
420   push_roots<VisitWeaks>(context);
421   drain_and_steal<VisitWeaks>(context, object_cl);
422 }
423 
424 void ZHeapIterator::object_iterate(ObjectClosure* cl, uint worker_id) {
425   ZHeapIteratorContext context(this, worker_id);
426 
427   if (_visit_weaks) {
428     object_iterate_inner<true /* VisitWeaks */>(context, cl);
429   } else {
430     object_iterate_inner<false /* VisitWeaks */>(context, cl);
431   }
432 }