1 /*
  2  * Copyright (c) 2002, 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "classfile/classLoaderData.inline.hpp"
 26 #include "classfile/classLoaderDataGraph.hpp"
 27 #include "classfile/moduleEntry.hpp"
 28 #include "classfile/vmClasses.hpp"
 29 #include "gc/shared/collectedHeap.hpp"
 30 #include "logging/log.hpp"
 31 #include "logging/logTag.hpp"
 32 #include "memory/heapInspection.hpp"
 33 #include "memory/resourceArea.hpp"
 34 #include "memory/universe.hpp"
 35 #include "nmt/memTracker.hpp"
 36 #include "oops/oop.inline.hpp"
 37 #include "runtime/atomic.hpp"
 38 #include "runtime/os.hpp"
 39 #include "utilities/globalDefinitions.hpp"
 40 #include "utilities/macros.hpp"
 41 #include "utilities/stack.inline.hpp"
 42 
 43 // HeapInspection
 44 
 45 inline KlassInfoEntry::~KlassInfoEntry() {
 46   if (_subclasses != nullptr) {
 47     delete _subclasses;
 48   }
 49 }
 50 
// Appends <cie> to this entry's subclass list, lazily allocating the
// C-heap-backed array (initial capacity 4) on first use.
inline void KlassInfoEntry::add_subclass(KlassInfoEntry* cie) {
  if (_subclasses == nullptr) {
    _subclasses = new (mtServiceability) GrowableArray<KlassInfoEntry*>(4, mtServiceability);
  }
  _subclasses->append(cie);
}
 57 
 58 int KlassInfoEntry::compare(KlassInfoEntry* e1, KlassInfoEntry* e2) {
 59   if(e1->_instance_words > e2->_instance_words) {
 60     return -1;
 61   } else if(e1->_instance_words < e2->_instance_words) {
 62     return 1;
 63   }
 64   // Sort alphabetically, note 'Z' < '[' < 'a', but it's better to group
 65   // the array classes before all the instance classes.
 66   ResourceMark rm;
 67   const char* name1 = e1->klass()->external_name();
 68   const char* name2 = e2->klass()->external_name();
 69   bool d1 = (name1[0] == JVM_SIGNATURE_ARRAY);
 70   bool d2 = (name2[0] == JVM_SIGNATURE_ARRAY);
 71   if (d1 && !d2) {
 72     return -1;
 73   } else if (d2 && !d1) {
 74     return 1;
 75   } else {
 76     return strcmp(name1, name2);
 77   }
 78 }
 79 
 80 const char* KlassInfoEntry::name() const {
 81   const char* name;
 82   if (_klass->name() != nullptr) {
 83     name = _klass->external_name();
 84   } else {
 85     if (_klass == Universe::boolArrayKlass())         name = "<boolArrayKlass>";         else
 86     if (_klass == Universe::charArrayKlass())         name = "<charArrayKlass>";         else
 87     if (_klass == Universe::floatArrayKlass())        name = "<floatArrayKlass>";        else
 88     if (_klass == Universe::doubleArrayKlass())       name = "<doubleArrayKlass>";       else
 89     if (_klass == Universe::byteArrayKlass())         name = "<byteArrayKlass>";         else
 90     if (_klass == Universe::shortArrayKlass())        name = "<shortArrayKlass>";        else
 91     if (_klass == Universe::intArrayKlass())          name = "<intArrayKlass>";          else
 92     if (_klass == Universe::longArrayKlass())         name = "<longArrayKlass>";         else
 93       name = "<no name>";
 94   }
 95   return name;
 96 }
 97 
 98 void KlassInfoEntry::print_on(outputStream* st) const {
 99   ResourceMark rm;
100 
101   // simplify the formatting (ILP32 vs LP64) - always cast the numbers to 64-bit
102   ModuleEntry* module = _klass->module();
103   if (module->is_named()) {
104     st->print_cr(INT64_FORMAT_W(13) "  " UINT64_FORMAT_W(13) "  %s (%s%s%s)",
105                  (int64_t)_instance_count,
106                  (uint64_t)_instance_words * HeapWordSize,
107                  name(),
108                  module->name()->as_C_string(),
109                  module->version() != nullptr ? "@" : "",
110                  module->version() != nullptr ? module->version()->as_C_string() : "");
111   } else {
112     st->print_cr(INT64_FORMAT_W(13) "  " UINT64_FORMAT_W(13) "  %s",
113                  (int64_t)_instance_count,
114                  (uint64_t)_instance_words * HeapWordSize,
115                  name());
116   }
117 }
118 
119 KlassInfoEntry* KlassInfoBucket::lookup(Klass* const k) {
120   // Can happen if k is an archived class that we haven't loaded yet.
121   if (k->java_mirror_no_keepalive() == nullptr) {
122     return nullptr;
123   }
124 
125   KlassInfoEntry* elt = _list;
126   while (elt != nullptr) {
127     if (elt->is_equal(k)) {
128       return elt;
129     }
130     elt = elt->next();
131   }
132   elt = new (std::nothrow) KlassInfoEntry(k, list());
133   // We may be out of space to allocate the new entry.
134   if (elt != nullptr) {
135     set_list(elt);
136   }
137   return elt;
138 }
139 
140 void KlassInfoBucket::iterate(KlassInfoClosure* cic) {
141   KlassInfoEntry* elt = _list;
142   while (elt != nullptr) {
143     cic->do_cinfo(elt);
144     elt = elt->next();
145   }
146 }
147 
148 void KlassInfoBucket::empty() {
149   KlassInfoEntry* elt = _list;
150   _list = nullptr;
151   while (elt != nullptr) {
152     KlassInfoEntry* next = elt->next();
153     delete elt;
154     elt = next;
155   }
156 }
157 
// LockedClassesDo closure that visits every class in the
// ClassLoaderDataGraph so the table gets an entry for each one.
class KlassInfoTable::AllClassesFinder : public LockedClassesDo {
  KlassInfoTable *_table;
public:
  AllClassesFinder(KlassInfoTable* table) : _table(table) {}
  virtual void do_klass(Klass* k) {
    // This has the SIDE EFFECT of creating a KlassInfoEntry
    // for <k>, if one doesn't exist yet.
    _table->lookup(k);
  }
};
168 
169 
// Builds the bucketed hashtable of KlassInfoEntry. When <add_all_classes>
// is true, pre-populates it with an entry for every loaded class. If the
// C-heap allocation of the bucket array fails, _buckets stays null;
// callers must check allocation_failed() before using the table.
KlassInfoTable::KlassInfoTable(bool add_all_classes) {
  _size_of_instances_in_words = 0;
  // Base address used by hash() to turn Klass* values into bucket indices.
  _ref = (uintptr_t) Universe::boolArrayKlass();
  _buckets =
    (KlassInfoBucket*)  AllocateHeap(sizeof(KlassInfoBucket) * _num_buckets,
       mtInternal, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
  if (_buckets != nullptr) {
    for (int index = 0; index < _num_buckets; index++) {
      _buckets[index].initialize();
    }
    if (add_all_classes) {
      AllClassesFinder finder(this);
      ClassLoaderDataGraph::classes_do(&finder);
    }
  }
}
186 
187 KlassInfoTable::~KlassInfoTable() {
188   if (_buckets != nullptr) {
189     for (int index = 0; index < _num_buckets; index++) {
190       _buckets[index].empty();
191     }
192     FREE_C_HEAP_ARRAY(KlassInfoBucket, _buckets);
193     _buckets = nullptr;
194   }
195 }
196 
// Hashes a Klass* by its distance from the _ref base address, shifting
// off the low two bits (presumably near-constant for aligned Klass
// pointers, so they add little to the distribution).
uint KlassInfoTable::hash(const Klass* p) {
  return (uint)(((uintptr_t)p - _ref) >> 2);
}
200 
// Returns the table entry for <k>, creating it on demand; may return
// nullptr (see comment below).
KlassInfoEntry* KlassInfoTable::lookup(Klass* k) {
  uint         idx = hash(k) % _num_buckets;
  assert(_buckets != nullptr, "Allocation failure should have been caught");
  KlassInfoEntry*  e   = _buckets[idx].lookup(k);
  // Lookup may fail if this is a new klass for which we
  // could not allocate space for an new entry, or if it's
  // an archived class that we haven't loaded yet.
  assert(e == nullptr || k == e->klass(), "must be equal");
  return e;
}
211 
212 // Return false if the entry could not be recorded on account
213 // of running out of space required to create a new entry.
214 bool KlassInfoTable::record_instance(const oop obj) {
215   Klass*        k = obj->klass();
216   KlassInfoEntry* elt = lookup(k);
217   // elt may be null if it's a new klass for which we
218   // could not allocate space for a new entry in the hashtable.
219   if (elt != nullptr) {
220     elt->set_count(elt->count() + 1);
221     elt->set_words(elt->words() + obj->size());
222     _size_of_instances_in_words += obj->size();
223     return true;
224   } else {
225     return false;
226   }
227 }
228 
229 void KlassInfoTable::iterate(KlassInfoClosure* cic) {
230   assert(_buckets != nullptr, "Allocation failure should have been caught");
231   for (int index = 0; index < _num_buckets; index++) {
232     _buckets[index].iterate(cic);
233   }
234 }
235 
// Total size, in HeapWords, of all instances recorded so far.
size_t KlassInfoTable::size_of_instances_in_words() const {
  return _size_of_instances_in_words;
}
239 
240 // Return false if the entry could not be recorded on account
241 // of running out of space required to create a new entry.
242 bool KlassInfoTable::merge_entry(const KlassInfoEntry* cie) {
243   Klass*          k = cie->klass();
244   KlassInfoEntry* elt = lookup(k);
245   // elt may be null if it's a new klass for which we
246   // could not allocate space for a new entry in the hashtable.
247   if (elt != nullptr) {
248     elt->set_count(elt->count() + cie->count());
249     elt->set_words(elt->words() + cie->words());
250     _size_of_instances_in_words += cie->words();
251     return true;
252   }
253   return false;
254 }
255 
// KlassInfoClosure that merges every visited entry into a destination
// table, remembering whether all merges succeeded.
class KlassInfoTableMergeClosure : public KlassInfoClosure {
private:
  KlassInfoTable* _dest;
  bool _success;  // cleared permanently once any merge_entry() fails
public:
  KlassInfoTableMergeClosure(KlassInfoTable* table) : _dest(table), _success(true) {}
  void do_cinfo(KlassInfoEntry* cie) {
    _success &= _dest->merge_entry(cie);
  }
  bool success() { return _success; }
};
267 
// Merges all entries from <table> into this table. Returns false if any
// entry could not be merged (C-heap exhaustion in merge_entry).
bool KlassInfoTable::merge(KlassInfoTable* table) {
  KlassInfoTableMergeClosure closure(this);
  table->iterate(&closure);
  return closure.success();
}
274 
275 int KlassInfoHisto::sort_helper(KlassInfoEntry** e1, KlassInfoEntry** e2) {
276   return (*e1)->compare(*e1,*e2);
277 }
278 
// Builds an initially-empty histogram over the entries of <cit>.
KlassInfoHisto::KlassInfoHisto(KlassInfoTable* cit) :
  _cit(cit) {
  _elements = new (mtServiceability) GrowableArray<KlassInfoEntry*>(_histo_initial_size, mtServiceability);
}
283 
// Frees only the element array; the entries it points at are owned by
// the KlassInfoTable (freed via KlassInfoBucket::empty).
KlassInfoHisto::~KlassInfoHisto() {
  delete _elements;
}
287 
// Appends a table entry to the histogram (stores the pointer, no copy).
void KlassInfoHisto::add(KlassInfoEntry* cie) {
  elements()->append(cie);
}
291 
// Sorts entries largest-first, ties by name (see KlassInfoEntry::compare).
void KlassInfoHisto::sort() {
  elements()->sort(KlassInfoHisto::sort_helper);
}
295 
296 void KlassInfoHisto::print_elements(outputStream* st) const {
297   // simplify the formatting (ILP32 vs LP64) - store the sum in 64-bit
298   int64_t total = 0;
299   uint64_t totalw = 0;
300   for(int i=0; i < elements()->length(); i++) {
301     st->print("%4d: ", i+1);
302     elements()->at(i)->print_on(st);
303     total += elements()->at(i)->count();
304     totalw += elements()->at(i)->words();
305   }
306   st->print_cr("Total " INT64_FORMAT_W(13) "  " UINT64_FORMAT_W(13),
307                total, totalw * HeapWordSize);
308 }
309 
310 class HierarchyClosure : public KlassInfoClosure {
311 private:
312   GrowableArray<KlassInfoEntry*> *_elements;
313 public:
314   HierarchyClosure(GrowableArray<KlassInfoEntry*> *_elements) : _elements(_elements) {}
315 
316   void do_cinfo(KlassInfoEntry* cie) {
317     // ignore array classes
318     if (cie->klass()->is_instance_klass()) {
319       _elements->append(cie);
320     }
321   }
322 };
323 
// Prints the class hierarchy, rooted at java.lang.Object, to <st>.
// If <classname> is non-null, only that class's superclasses and
// (when <print_subclasses>) subclasses are printed; otherwise every
// instance class is. Array classes are ignored throughout.
void KlassHierarchy::print_class_hierarchy(outputStream* st, bool print_interfaces,
                                           bool print_subclasses, char* classname) {
  ResourceMark rm;
  Stack <KlassInfoEntry*, mtClass> class_stack;
  GrowableArray<KlassInfoEntry*> elements;

  // Add all classes to the KlassInfoTable, which allows for quick lookup.
  // A KlassInfoEntry will be created for each class.
  KlassInfoTable cit(true);
  if (cit.allocation_failed()) {
    st->print_cr("ERROR: Ran out of C-heap; hierarchy not generated");
    return;
  }

  // Add all created KlassInfoEntry instances to the elements array for easy
  // iteration, and to allow each KlassInfoEntry instance to have a unique index.
  HierarchyClosure hc(&elements);
  cit.iterate(&hc);

  for(int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    Klass* super = cie->klass()->super();

    // Set the index for the class.
    cie->set_index(i + 1);

    // Add the class to the subclass array of its superclass.
    if (super != nullptr) {
      KlassInfoEntry* super_cie = cit.lookup(super);
      assert(super_cie != nullptr, "could not lookup superclass");
      super_cie->add_subclass(cie);
    }
  }

  // Set the do_print flag for each class that should be printed.
  for(int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    if (classname == nullptr) {
      // We are printing all classes.
      cie->set_do_print(true);
    } else {
      // We are only printing the hierarchy of a specific class.
      if (strcmp(classname, cie->klass()->external_name()) == 0) {
        KlassHierarchy::set_do_print_for_class_hierarchy(cie, &cit, print_subclasses);
      }
    }
  }

  // Now we do a depth first traversal of the class hierarchy. The class_stack will
  // maintain the list of classes we still need to process. Start things off
  // by priming it with java.lang.Object.
  KlassInfoEntry* jlo_cie = cit.lookup(vmClasses::Object_klass());
  assert(jlo_cie != nullptr, "could not lookup java.lang.Object");
  class_stack.push(jlo_cie);

  // Repeatedly pop the top item off the stack, print its class info,
  // and push all of its subclasses on to the stack. Do this until there
  // are no classes left on the stack.
  while (!class_stack.is_empty()) {
    KlassInfoEntry* curr_cie = class_stack.pop();
    if (curr_cie->do_print()) {
      print_class(st, curr_cie, print_interfaces);
      if (curr_cie->subclasses() != nullptr) {
        // Current class has subclasses, so push all of them onto the stack.
        for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
          KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
          if (cie->do_print()) {
            class_stack.push(cie);
          }
        }
      }
    }
  }

  st->flush();
}
400 
401 // Sets the do_print flag for every superclass and subclass of the specified class.
402 void KlassHierarchy::set_do_print_for_class_hierarchy(KlassInfoEntry* cie, KlassInfoTable* cit,
403                                                       bool print_subclasses) {
404   // Set do_print for all superclasses of this class.
405   Klass* super = ((InstanceKlass*)cie->klass())->java_super();
406   while (super != nullptr) {
407     KlassInfoEntry* super_cie = cit->lookup(super);
408     super_cie->set_do_print(true);
409     super = super->super();
410   }
411 
412   // Set do_print for this class and all of its subclasses.
413   Stack <KlassInfoEntry*, mtClass> class_stack;
414   class_stack.push(cie);
415   while (!class_stack.is_empty()) {
416     KlassInfoEntry* curr_cie = class_stack.pop();
417     curr_cie->set_do_print(true);
418     if (print_subclasses && curr_cie->subclasses() != nullptr) {
419       // Current class has subclasses, so push all of them onto the stack.
420       for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
421         KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
422         class_stack.push(cie);
423       }
424     }
425   }
426 }
427 
428 static void print_indent(outputStream* st, int indent) {
429   while (indent != 0) {
430     st->print("|");
431     indent--;
432     if (indent != 0) {
433       st->print("  ");
434     }
435   }
436 }
437 
438 // Print the class name and its unique ClassLoader identifier.
439 static void print_classname(outputStream* st, Klass* klass) {
440   oop loader_oop = klass->class_loader_data()->class_loader();
441   st->print("%s/", klass->external_name());
442   if (loader_oop == nullptr) {
443     st->print("null");
444   } else {
445     st->print(PTR_FORMAT, p2i(klass->class_loader_data()));
446   }
447 }
448 
// Prints an "implements" line for one interface of a class, tagged with
// <intf_type> (e.g. declared vs inherited) and indented to match the class.
static void print_interface(outputStream* st, InstanceKlass* intf_klass, const char* intf_type, int indent) {
  print_indent(st, indent);
  st->print("  implements ");
  print_classname(st, intf_klass);
  st->print(" (%s intf)\n", intf_type);
}
455 
// Prints one class entry: indentation reflecting its depth in the class
// hierarchy, its name and loader identifier, an "(intf)" tag for
// interfaces, and (when <print_interfaces>) the interfaces it implements.
void KlassHierarchy::print_class(outputStream* st, KlassInfoEntry* cie, bool print_interfaces) {
  ResourceMark rm;
  InstanceKlass* klass = (InstanceKlass*)cie->klass();
  int indent = 0;

  // Print indentation with proper indicators of superclass.
  // Depth equals the number of superclasses above this class.
  Klass* super = klass->super();
  while (super != nullptr) {
    super = super->super();
    indent++;
  }
  print_indent(st, indent);
  if (indent != 0) st->print("--");

  // Print the class name, its unique ClassLoader identifier, and if it is an interface.
  print_classname(st, klass);
  if (klass->is_interface()) {
    st->print(" (intf)");
  }
  st->print("\n");

  // Print any interfaces the class has.
  if (print_interfaces) {
    Array<InstanceKlass*>* local_intfs = klass->local_interfaces();
    Array<InstanceKlass*>* trans_intfs = klass->transitive_interfaces();
    for (int i = 0; i < local_intfs->length(); i++) {
      print_interface(st, local_intfs->at(i), "declared", indent);
    }
    for (int i = 0; i < trans_intfs->length(); i++) {
      InstanceKlass* trans_interface = trans_intfs->at(i);
      // Only print transitive interfaces if they are not also declared.
      if (!local_intfs->contains(trans_interface)) {
        print_interface(st, trans_interface, "inherited", indent);
      }
    }
  }
}
493 
// Prints the histogram header line followed by all entries (call sort()
// first for the usual largest-first ordering).
void KlassInfoHisto::print_histo_on(outputStream* st) {
  st->print_cr(" num     #instances         #bytes  class name (module)");
  st->print_cr("-------------------------------------------------------");
  print_elements(st);
}
499 
// KlassInfoClosure that feeds every visited table entry into a histogram.
class HistoClosure : public KlassInfoClosure {
 private:
  KlassInfoHisto* _cih;
 public:
  HistoClosure(KlassInfoHisto* cih) : _cih(cih) {}

  void do_cinfo(KlassInfoEntry* cie) {
    _cih->add(cie);
  }
};
510 
// ObjectClosure that records each visited heap object in a KlassInfoTable,
// optionally restricted by a filter. Objects that could not be recorded
// (C-heap exhaustion in the table) are tallied in _missed_count.
class RecordInstanceClosure : public ObjectClosure {
 private:
  KlassInfoTable* _cit;
  uintx _missed_count;        // objects skipped because record_instance failed
  BoolObjectClosure* _filter; // may be null, meaning visit every object
 public:
  RecordInstanceClosure(KlassInfoTable* cit, BoolObjectClosure* filter) :
    _cit(cit), _missed_count(0), _filter(filter) {}

  void do_object(oop obj) {
    if (should_visit(obj)) {
      if (!_cit->record_instance(obj)) {
        _missed_count++;
      }
    }
  }

  uintx missed_count() { return _missed_count; }

 private:
  bool should_visit(oop obj) {
    return _filter == nullptr || _filter->do_object_b(obj);
  }
};
535 
// Heap inspection for every worker.
// When native OOM happens for KlassInfoTable, set _success to false.
// Each worker iterates its share of the heap into a private table, then
// merges that table into the shared one under _mutex.
void ParHeapInspectTask::work(uint worker_id) {
  uintx missed_count = 0;
  bool merge_success = true;
  if (!Atomic::load(&_success)) {
    // other worker has failed on parallel iteration.
    return;
  }

  KlassInfoTable cit(false);
  if (cit.allocation_failed()) {
    // fail to allocate memory, stop parallel mode
    Atomic::store(&_success, false);
    return;
  }
  RecordInstanceClosure ric(&cit, _filter);
  _poi->object_iterate(&ric, worker_id);
  missed_count = ric.missed_count();
  {
    // Merging into the shared table must be serialized across workers.
    MutexLocker x(&_mutex, Mutex::_no_safepoint_check_flag);
    merge_success = _shared_cit->merge(&cit);
  }
  if (merge_success) {
    Atomic::add(&_missed_count, missed_count);
  } else {
    Atomic::store(&_success, false);
  }
}
565 
// Fills <cit> with per-class instance counts and sizes for the whole heap.
// Tries parallel iteration when <workers> is supplied, falling back to a
// serial walk if the parallel attempt fails. Returns the number of objects
// that could not be recorded (C-heap exhaustion).
uintx HeapInspection::populate_table(KlassInfoTable* cit, BoolObjectClosure *filter, WorkerThreads* workers) {
  // Try parallel first.
  if (workers != nullptr) {
    ResourceMark rm;
    ParallelObjectIterator poi(workers->active_workers());
    ParHeapInspectTask task(&poi, cit, filter);
    // Run task with the active workers.
    workers->run_task(&task);
    if (task.success()) {
      return task.missed_count();
    }
  }

  ResourceMark rm;
  // If no parallel iteration available, run serially.
  RecordInstanceClosure ric(cit, filter);
  Universe::heap()->object_iterate(&ric);
  return ric.missed_count();
}
585 
// Prints a class histogram of the heap (instance counts and byte totals
// per class, sorted largest-first) to <st>. A warning is logged when some
// instances could not be counted due to C-heap exhaustion.
void HeapInspection::heap_inspection(outputStream* st, WorkerThreads* workers) {
  ResourceMark rm;

  KlassInfoTable cit(false);
  if (!cit.allocation_failed()) {
    // populate table with object allocation info
    uintx missed_count = populate_table(&cit, nullptr, workers);
    if (missed_count != 0) {
      log_info(gc, classhisto)("WARNING: Ran out of C-heap; undercounted %zu"
                               " total instances in data below",
                               missed_count);
    }

    // Sort and print klass instance info
    KlassInfoHisto histo(&cit);
    HistoClosure hc(&histo);

    cit.iterate(&hc);

    histo.sort();
    histo.print_histo_on(st);
  } else {
    st->print_cr("ERROR: Ran out of C-heap; histogram not generated");
  }
  st->flush();
}
612 
613 class FindInstanceClosure : public ObjectClosure {
614  private:
615   Klass* _klass;
616   GrowableArray<oop>* _result;
617 
618  public:
619   FindInstanceClosure(Klass* k, GrowableArray<oop>* result) : _klass(k), _result(result) {};
620 
621   void do_object(oop obj) {
622     if (obj->is_a(_klass)) {
623       // obj was read with AS_NO_KEEPALIVE, or equivalent.
624       // The object needs to be kept alive when it is published.
625       Universe::heap()->keep_alive(obj);
626 
627       _result->append(obj);
628     }
629   }
630 };
631 
// Collects every heap object that is an instance of (a subtype of) <k>
// into <result>. Must run at a safepoint with the Heap_lock held.
void HeapInspection::find_instances_at_safepoint(Klass* k, GrowableArray<oop>* result) {
  assert(SafepointSynchronize::is_at_safepoint(), "all threads are stopped");
  assert(Heap_lock->is_locked(), "should have the Heap_lock");

  // Ensure that the heap is parsable
  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

  // Iterate over objects in the heap
  FindInstanceClosure fic(k, result);
  Universe::heap()->object_iterate(&fic);
}