/*
 * Copyright (c) 2002, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/vmClasses.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "logging/log.hpp"
#include "logging/logTag.hpp"
#include "memory/heapInspection.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"
#include "oops/reflectionAccessorImplKlassHelper.hpp"
#include "runtime/atomic.hpp"
#include "runtime/os.hpp"
#include "services/memTracker.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"
#include "utilities/stack.inline.hpp"

// HeapInspection

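// This file implements the class histogram machinery (KlassInfoTable and
// KlassInfoHisto), the class hierarchy printer (KlassHierarchy), and the
// parallel heap walk (ParHeapInspectTask) used to populate the histogram.
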
inline KlassInfoEntry::~KlassInfoEntry() {
  if (_subclasses != NULL) {
    delete _subclasses;
  }
}

inline void KlassInfoEntry::add_subclass(KlassInfoEntry* cie) {
  if (_subclasses == NULL) {
    _subclasses = new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<KlassInfoEntry*>(4, mtServiceability);
  }
  _subclasses->append(cie);
}

int KlassInfoEntry::compare(KlassInfoEntry* e1, KlassInfoEntry* e2) {
  if(e1->_instance_words > e2->_instance_words) {
    return -1;
  } else if(e1->_instance_words < e2->_instance_words) {
    return 1;
  }
  // Sort alphabetically, note 'Z' < '[' < 'a', but it's better to group
  // the array classes before all the instance classes.
  ResourceMark rm;
  const char* name1 = e1->klass()->external_name();
  const char* name2 = e2->klass()->external_name();
  bool d1 = (name1[0] == JVM_SIGNATURE_ARRAY);
  bool d2 = (name2[0] == JVM_SIGNATURE_ARRAY);
  if (d1 && !d2) {
    return -1;
  } else if (d2 && !d1) {
    return 1;
  } else {
    return strcmp(name1, name2);
  }
}

const char* KlassInfoEntry::name() const {
  const char* name;
  if (_klass->name() != NULL) {
    name = _klass->external_name();
  } else {
    if (_klass == Universe::boolArrayKlassObj())         name = "<boolArrayKlass>";         else
    if (_klass == Universe::charArrayKlassObj())         name = "<charArrayKlass>";         else
    if (_klass == Universe::floatArrayKlassObj())        name = "<floatArrayKlass>";        else
    if (_klass == Universe::doubleArrayKlassObj())       name = "<doubleArrayKlass>";       else
    if (_klass == Universe::byteArrayKlassObj())         name = "<byteArrayKlass>";         else
    if (_klass == Universe::shortArrayKlassObj())        name = "<shortArrayKlass>";        else
    if (_klass == Universe::intArrayKlassObj())          name = "<intArrayKlass>";          else
    if (_klass == Universe::longArrayKlassObj())         name = "<longArrayKlass>";         else
      name = "<no name>";
  }
  return name;
}

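// Prints one histogram row: instance count, total byte size, class name and,
// for classes in a named module, "(module@version)". An illustrative row as
// produced below (the values are made up):
//    1:         86608        7081280  [B (java.base@17.0.1)
// The leading "   1: " rank prefix is added by KlassInfoHisto::print_elements().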
void KlassInfoEntry::print_on(outputStream* st) const {
  ResourceMark rm;

  // simplify the formatting (ILP32 vs LP64) - always cast the numbers to 64-bit
  ModuleEntry* module = _klass->module();
  if (module->is_named()) {
    st->print_cr(INT64_FORMAT_W(13) "  " UINT64_FORMAT_W(13) "  %s (%s%s%s)",
                 (int64_t)_instance_count,
                 (uint64_t)_instance_words * HeapWordSize,
                 name(),
                 module->name()->as_C_string(),
                 module->version() != NULL ? "@" : "",
                 module->version() != NULL ? module->version()->as_C_string() : "");
  } else {
    st->print_cr(INT64_FORMAT_W(13) "  " UINT64_FORMAT_W(13) "  %s",
                 (int64_t)_instance_count,
                 (uint64_t)_instance_words * HeapWordSize,
                 name());
  }
}

KlassInfoEntry* KlassInfoBucket::lookup(Klass* const k) {
  // Can happen if k is an archived class that we haven't loaded yet.
  if (k->java_mirror_no_keepalive() == NULL) {
    return NULL;
  }

  KlassInfoEntry* elt = _list;
  while (elt != NULL) {
    if (elt->is_equal(k)) {
      return elt;
    }
    elt = elt->next();
  }
  elt = new (std::nothrow) KlassInfoEntry(k, list());
  // We may be out of space to allocate the new entry.
  if (elt != NULL) {
    set_list(elt);
  }
  return elt;
}

void KlassInfoBucket::iterate(KlassInfoClosure* cic) {
  KlassInfoEntry* elt = _list;
  while (elt != NULL) {
    cic->do_cinfo(elt);
    elt = elt->next();
  }
}

void KlassInfoBucket::empty() {
  KlassInfoEntry* elt = _list;
  _list = NULL;
  while (elt != NULL) {
    KlassInfoEntry* next = elt->next();
    delete elt;
    elt = next;
  }
}

class KlassInfoTable::AllClassesFinder : public LockedClassesDo {
  KlassInfoTable *_table;
public:
  AllClassesFinder(KlassInfoTable* table) : _table(table) {}
  virtual void do_klass(Klass* k) {
    // This has the SIDE EFFECT of creating a KlassInfoEntry
    // for <k>, if one doesn't exist yet.
    _table->lookup(k);
  }
};

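// KlassInfoTable is a fixed-size, chained-bucket hash table keyed by Klass*.
// All allocations use non-throwing variants; if the bucket array or an entry
// cannot be allocated, the failure is recorded and callers check
// allocation_failed() or NULL lookups rather than aborting the VM.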
KlassInfoTable::KlassInfoTable(bool add_all_classes) {
  _size_of_instances_in_words = 0;
  _ref = (HeapWord*) Universe::boolArrayKlassObj();
  _buckets =
    (KlassInfoBucket*)  AllocateHeap(sizeof(KlassInfoBucket) * _num_buckets,
       mtInternal, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
  if (_buckets != NULL) {
    for (int index = 0; index < _num_buckets; index++) {
      _buckets[index].initialize();
    }
    if (add_all_classes) {
      AllClassesFinder finder(this);
      ClassLoaderDataGraph::classes_do(&finder);
    }
  }
}

KlassInfoTable::~KlassInfoTable() {
  if (_buckets != NULL) {
    for (int index = 0; index < _num_buckets; index++) {
      _buckets[index].empty();
    }
    FREE_C_HEAP_ARRAY(KlassInfoBucket, _buckets);
    _buckets = NULL;
  }
}

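// Hash a Klass* by its distance from a fixed reference Klass (_ref, captured
// in the constructor). The low two bits are discarded (Klass pointers are at
// least word aligned, so they carry no information); the caller reduces the
// result modulo _num_buckets.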
uint KlassInfoTable::hash(const Klass* p) {
  return (uint)(((uintptr_t)p - (uintptr_t)_ref) >> 2);
}

KlassInfoEntry* KlassInfoTable::lookup(Klass* k) {
  uint         idx = hash(k) % _num_buckets;
  assert(_buckets != NULL, "Allocation failure should have been caught");
  KlassInfoEntry*  e   = _buckets[idx].lookup(k);
  // Lookup may fail if this is a new klass for which we
  // could not allocate space for a new entry, or if it's
  // an archived class that we haven't loaded yet.
  assert(e == NULL || k == e->klass(), "must be equal");
  return e;
}

// Return false if the entry could not be recorded on account
// of running out of space required to create a new entry.
bool KlassInfoTable::record_instance(const oop obj) {
  Klass*        k = obj->klass();
  KlassInfoEntry* elt = lookup(k);
  // elt may be NULL if it's a new klass for which we
  // could not allocate space for a new entry in the hashtable.
  if (elt != NULL) {
    elt->set_count(elt->count() + 1);
    elt->set_words(elt->words() + obj->size());
    _size_of_instances_in_words += obj->size();
    return true;
  } else {
    return false;
  }
}

void KlassInfoTable::iterate(KlassInfoClosure* cic) {
  assert(_buckets != NULL, "Allocation failure should have been caught");
  for (int index = 0; index < _num_buckets; index++) {
    _buckets[index].iterate(cic);
  }
}

size_t KlassInfoTable::size_of_instances_in_words() const {
  return _size_of_instances_in_words;
}

// Return false if the entry could not be recorded on account
// of running out of space required to create a new entry.
bool KlassInfoTable::merge_entry(const KlassInfoEntry* cie) {
  Klass*          k = cie->klass();
  KlassInfoEntry* elt = lookup(k);
  // elt may be NULL if it's a new klass for which we
  // could not allocate space for a new entry in the hashtable.
  if (elt != NULL) {
    elt->set_count(elt->count() + cie->count());
    elt->set_words(elt->words() + cie->words());
    _size_of_instances_in_words += cie->words();
    return true;
  }
  return false;
}

class KlassInfoTableMergeClosure : public KlassInfoClosure {
private:
  KlassInfoTable* _dest;
  bool _success;
public:
  KlassInfoTableMergeClosure(KlassInfoTable* table) : _dest(table), _success(true) {}
  void do_cinfo(KlassInfoEntry* cie) {
    _success &= _dest->merge_entry(cie);
  }
  bool success() { return _success; }
};

// Merge the entries of 'table' into this table. Returns false if any entry
// could not be added (out of C-heap). Used to fold the per-worker tables
// built during parallel heap inspection into the shared table.
bool KlassInfoTable::merge(KlassInfoTable* table) {
  KlassInfoTableMergeClosure closure(this);
  table->iterate(&closure);
  return closure.success();
}

276 
277 int KlassInfoHisto::sort_helper(KlassInfoEntry** e1, KlassInfoEntry** e2) {
278   return (*e1)->compare(*e1,*e2);
279 }
280 
281 KlassInfoHisto::KlassInfoHisto(KlassInfoTable* cit) :
282   _cit(cit) {
283   _elements = new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<KlassInfoEntry*>(_histo_initial_size, mtServiceability);
284 }
285 
286 KlassInfoHisto::~KlassInfoHisto() {
287   delete _elements;
288 }
289 
290 void KlassInfoHisto::add(KlassInfoEntry* cie) {
291   elements()->append(cie);
292 }
293 
294 void KlassInfoHisto::sort() {
295   elements()->sort(KlassInfoHisto::sort_helper);
296 }
297 
298 void KlassInfoHisto::print_elements(outputStream* st) const {
299   // simplify the formatting (ILP32 vs LP64) - store the sum in 64-bit
300   int64_t total = 0;
301   uint64_t totalw = 0;
302   for(int i=0; i < elements()->length(); i++) {
303     st->print("%4d: ", i+1);
304     elements()->at(i)->print_on(st);
305     total += elements()->at(i)->count();
306     totalw += elements()->at(i)->words();
307   }
308   st->print_cr("Total " INT64_FORMAT_W(13) "  " UINT64_FORMAT_W(13),
309                total, totalw * HeapWordSize);
310 }
311 
312 class HierarchyClosure : public KlassInfoClosure {
313 private:
314   GrowableArray<KlassInfoEntry*> *_elements;
315 public:
316   HierarchyClosure(GrowableArray<KlassInfoEntry*> *_elements) : _elements(_elements) {}
317 
318   void do_cinfo(KlassInfoEntry* cie) {
319     // ignore array classes
320     if (cie->klass()->is_instance_klass()) {
321       _elements->append(cie);
322     }
323   }
324 };
325 
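// Prints the class hierarchy as an indented tree rooted at java.lang.Object.
// Outline: build a KlassInfoTable containing every loaded class, link each
// entry into its superclass's subclass list, mark which entries should be
// printed (all classes, or only the hierarchy around 'classname'), then walk
// the tree depth-first from java.lang.Object.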
void KlassHierarchy::print_class_hierarchy(outputStream* st, bool print_interfaces,
                                           bool print_subclasses, char* classname) {
  ResourceMark rm;
  Stack <KlassInfoEntry*, mtClass> class_stack;
  GrowableArray<KlassInfoEntry*> elements;

  // Add all classes to the KlassInfoTable, which allows for quick lookup.
  // A KlassInfoEntry will be created for each class.
  KlassInfoTable cit(true);
  if (cit.allocation_failed()) {
    st->print_cr("ERROR: Ran out of C-heap; hierarchy not generated");
    return;
  }

  // Add all created KlassInfoEntry instances to the elements array for easy
  // iteration, and to allow each KlassInfoEntry instance to have a unique index.
  HierarchyClosure hc(&elements);
  cit.iterate(&hc);

  for(int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    Klass* super = cie->klass()->super();

    // Set the index for the class.
    cie->set_index(i + 1);

    // Add the class to the subclass array of its superclass.
    if (super != NULL) {
      KlassInfoEntry* super_cie = cit.lookup(super);
      assert(super_cie != NULL, "could not lookup superclass");
      super_cie->add_subclass(cie);
    }
  }

  // Set the do_print flag for each class that should be printed.
  for(int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    if (classname == NULL) {
      // We are printing all classes.
      cie->set_do_print(true);
    } else {
      // We are only printing the hierarchy of a specific class.
      if (strcmp(classname, cie->klass()->external_name()) == 0) {
        KlassHierarchy::set_do_print_for_class_hierarchy(cie, &cit, print_subclasses);
      }
    }
  }

  // Now we do a depth-first traversal of the class hierarchy. The class_stack will
  // maintain the list of classes we still need to process. Start things off
  // by priming it with java.lang.Object.
  KlassInfoEntry* jlo_cie = cit.lookup(vmClasses::Object_klass());
  assert(jlo_cie != NULL, "could not lookup java.lang.Object");
  class_stack.push(jlo_cie);

  // Repeatedly pop the top item off the stack, print its class info,
  // and push all of its subclasses on to the stack. Do this until there
  // are no classes left on the stack.
  while (!class_stack.is_empty()) {
    KlassInfoEntry* curr_cie = class_stack.pop();
    if (curr_cie->do_print()) {
      print_class(st, curr_cie, print_interfaces);
      if (curr_cie->subclasses() != NULL) {
        // Current class has subclasses, so push all of them onto the stack.
        for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
          KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
          if (cie->do_print()) {
            class_stack.push(cie);
          }
        }
      }
    }
  }

  st->flush();
}

// Sets the do_print flag for every superclass and subclass of the specified class.
void KlassHierarchy::set_do_print_for_class_hierarchy(KlassInfoEntry* cie, KlassInfoTable* cit,
                                                      bool print_subclasses) {
  // Set do_print for all superclasses of this class.
  Klass* super = ((InstanceKlass*)cie->klass())->java_super();
  while (super != NULL) {
    KlassInfoEntry* super_cie = cit->lookup(super);
    super_cie->set_do_print(true);
    super = super->super();
  }

  // Set do_print for this class and all of its subclasses.
  Stack <KlassInfoEntry*, mtClass> class_stack;
  class_stack.push(cie);
  while (!class_stack.is_empty()) {
    KlassInfoEntry* curr_cie = class_stack.pop();
    curr_cie->set_do_print(true);
    if (print_subclasses && curr_cie->subclasses() != NULL) {
      // Current class has subclasses, so push all of them onto the stack.
      for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
        KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
        class_stack.push(cie);
      }
    }
  }
}

static void print_indent(outputStream* st, int indent) {
  while (indent != 0) {
    st->print("|");
    indent--;
    if (indent != 0) {
      st->print("  ");
    }
  }
}

// Print the class name and its unique ClassLoader identifier.
static void print_classname(outputStream* st, Klass* klass) {
  oop loader_oop = klass->class_loader_data()->class_loader();
  st->print("%s/", klass->external_name());
  if (loader_oop == NULL) {
    st->print("null");
  } else {
    st->print(INTPTR_FORMAT, p2i(klass->class_loader_data()));
  }
}

static void print_interface(outputStream* st, InstanceKlass* intf_klass, const char* intf_type, int indent) {
  print_indent(st, indent);
  st->print("  implements ");
  print_classname(st, intf_klass);
  st->print(" (%s intf)\n", intf_type);
}

void KlassHierarchy::print_class(outputStream* st, KlassInfoEntry* cie, bool print_interfaces) {
  ResourceMark rm;
  InstanceKlass* klass = (InstanceKlass*)cie->klass();
  int indent = 0;

  // Print indentation with proper indicators of superclass.
  Klass* super = klass->super();
  while (super != NULL) {
    super = super->super();
    indent++;
  }
  print_indent(st, indent);
  if (indent != 0) st->print("--");

  // Print the class name, its unique ClassLoader identifier, and whether it is an interface.
  print_classname(st, klass);
  if (klass->is_interface()) {
    st->print(" (intf)");
  }
  // Special treatment for generated core reflection accessor classes: print invocation target.
  if (ReflectionAccessorImplKlassHelper::is_generated_accessor(klass)) {
    st->print(" (invokes: ");
    ReflectionAccessorImplKlassHelper::print_invocation_target(st, klass);
    st->print(")");
  }
  st->print("\n");

  // Print any interfaces the class has.
  if (print_interfaces) {
    Array<InstanceKlass*>* local_intfs = klass->local_interfaces();
    Array<InstanceKlass*>* trans_intfs = klass->transitive_interfaces();
    for (int i = 0; i < local_intfs->length(); i++) {
      print_interface(st, local_intfs->at(i), "declared", indent);
    }
    for (int i = 0; i < trans_intfs->length(); i++) {
      InstanceKlass* trans_interface = trans_intfs->at(i);
      // Only print transitive interfaces if they are not also declared.
      if (!local_intfs->contains(trans_interface)) {
        print_interface(st, trans_interface, "inherited", indent);
      }
    }
  }
}

void KlassInfoHisto::print_histo_on(outputStream* st) {
  st->print_cr(" num     #instances         #bytes  class name (module)");
  st->print_cr("-------------------------------------------------------");
  print_elements(st);
}

class HistoClosure : public KlassInfoClosure {
 private:
  KlassInfoHisto* _cih;
 public:
  HistoClosure(KlassInfoHisto* cih) : _cih(cih) {}

  void do_cinfo(KlassInfoEntry* cie) {
    _cih->add(cie);
  }
};

class RecordInstanceClosure : public ObjectClosure {
 private:
  KlassInfoTable* _cit;
  uintx _missed_count;
  BoolObjectClosure* _filter;
 public:
  RecordInstanceClosure(KlassInfoTable* cit, BoolObjectClosure* filter) :
    _cit(cit), _missed_count(0), _filter(filter) {}

  void do_object(oop obj) {
    if (should_visit(obj)) {
      if (!_cit->record_instance(obj)) {
        _missed_count++;
      }
    }
  }

  uintx missed_count() { return _missed_count; }

 private:
  bool should_visit(oop obj) {
    return _filter == NULL || _filter->do_object_b(obj);
  }
};

// Heap inspection work performed by each worker thread.
// If a native OOM occurs while setting up the per-worker KlassInfoTable,
// _success is set to false so the caller can fall back to serial inspection.
void ParHeapInspectTask::work(uint worker_id) {
  uintx missed_count = 0;
  bool merge_success = true;
  if (!Atomic::load(&_success)) {
    // Another worker has already failed during parallel iteration.
    return;
  }

  KlassInfoTable cit(false);
  if (cit.allocation_failed()) {
    // Failed to allocate memory; abort the parallel inspection.
    Atomic::store(&_success, false);
    return;
  }
  RecordInstanceClosure ric(&cit, _filter);
  _poi->object_iterate(&ric, worker_id);
  missed_count = ric.missed_count();
  {
    MutexLocker x(&_mutex, Mutex::_no_safepoint_check_flag);
    merge_success = _shared_cit->merge(&cit);
  }
  if (merge_success) {
    Atomic::add(&_missed_count, missed_count);
  } else {
    Atomic::store(&_success, false);
  }
}

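// Populates 'cit' with a count and size for every live object in the heap,
// optionally restricted by 'filter'. Tries a parallel walk first (using the
// GC's safepoint workers and parallel object iterator, when available) and
// otherwise falls back to a serial heap walk. Returns the number of instances
// that could not be recorded because the C-heap was exhausted.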
uintx HeapInspection::populate_table(KlassInfoTable* cit, BoolObjectClosure *filter, uint parallel_thread_num) {

  // Try parallel first.
  if (parallel_thread_num > 1) {
    ResourceMark rm;

    WorkGang* gang = Universe::heap()->safepoint_workers();
    if (gang != NULL) {
      // The GC provided a WorkGang to be used during a safepoint.

      // Can't run with more threads than provided by the WorkGang.
      WithUpdatedActiveWorkers update_and_restore(gang, parallel_thread_num);

      ParallelObjectIterator* poi = Universe::heap()->parallel_object_iterator(gang->active_workers());
      if (poi != NULL) {
        // The GC supports parallel object iteration.

        ParHeapInspectTask task(poi, cit, filter);
        // Run task with the active workers.
        gang->run_task(&task);

        delete poi;
        if (task.success()) {
          return task.missed_count();
        }
      }
    }
  }

  ResourceMark rm;
  // If no parallel iteration is available (or it failed), run serially.
  RecordInstanceClosure ric(cit, filter);
  Universe::heap()->object_iterate(&ric);
  return ric.missed_count();
}

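// Produces the class histogram on 'st': populate a KlassInfoTable from the
// heap, fold its entries into a KlassInfoHisto, sort by size, and print.
// Callers are expected to invoke this at a safepoint (the parallel path
// relies on the GC's safepoint workers); it is reached from the
// class-histogram diagnostic paths (e.g. jmap -histo).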
void HeapInspection::heap_inspection(outputStream* st, uint parallel_thread_num) {
  ResourceMark rm;

  KlassInfoTable cit(false);
  if (!cit.allocation_failed()) {
    // populate table with object allocation info
    uintx missed_count = populate_table(&cit, NULL, parallel_thread_num);
    if (missed_count != 0) {
      log_info(gc, classhisto)("WARNING: Ran out of C-heap; undercounted " UINTX_FORMAT
                               " total instances in data below",
                               missed_count);
    }

    // Sort and print klass instance info
    KlassInfoHisto histo(&cit);
    HistoClosure hc(&histo);

    cit.iterate(&hc);

    histo.sort();
    histo.print_histo_on(st);
  } else {
    st->print_cr("ERROR: Ran out of C-heap; histogram not generated");
  }
  st->flush();
}

class FindInstanceClosure : public ObjectClosure {
 private:
  Klass* _klass;
  GrowableArray<oop>* _result;

 public:
  FindInstanceClosure(Klass* k, GrowableArray<oop>* result) : _klass(k), _result(result) {};

  void do_object(oop obj) {
    if (obj->is_a(_klass)) {
      // obj was read with AS_NO_KEEPALIVE, or equivalent.
      // The object needs to be kept alive when it is published.
      Universe::heap()->keep_alive(obj);

      _result->append(obj);
    }
  }
};

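// Collects every live instance of klass 'k' (including instances of its
// subclasses, via oop::is_a) into 'result'. Must be called at a safepoint
// with the Heap_lock held, as asserted below.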
void HeapInspection::find_instances_at_safepoint(Klass* k, GrowableArray<oop>* result) {
  assert(SafepointSynchronize::is_at_safepoint(), "all threads are stopped");
  assert(Heap_lock->is_locked(), "should have the Heap_lock");

  // Ensure that the heap is parsable
  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

  // Iterate over objects in the heap
  FindInstanceClosure fic(k, result);
  Universe::heap()->object_iterate(&fic);
}