/*
 * Copyright (c) 2002, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/vmClasses.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "logging/log.hpp"
#include "logging/logTag.hpp"
#include "memory/heapInspection.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "nmt/memTracker.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/os.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"
#include "utilities/stack.inline.hpp"

// HeapInspection

inline KlassInfoEntry::~KlassInfoEntry() {
  if (_subclasses != nullptr) {
    delete _subclasses;
  }
}

inline void KlassInfoEntry::add_subclass(KlassInfoEntry* cie) {
  if (_subclasses == nullptr) {
    _subclasses = new (mtServiceability) GrowableArray<KlassInfoEntry*>(4, mtServiceability);
  }
  _subclasses->append(cie);
}

int KlassInfoEntry::compare(KlassInfoEntry* e1, KlassInfoEntry* e2) {
  if (e1->_instance_words > e2->_instance_words) {
    return -1;
  } else if (e1->_instance_words < e2->_instance_words) {
    return 1;
  }
  // Sort alphabetically. Note 'Z' < '[' < 'a', but it is better to group
  // all the array classes before the instance classes.
  ResourceMark rm;
  const char* name1 = e1->klass()->external_name();
  const char* name2 = e2->klass()->external_name();
  bool d1 = (name1[0] == JVM_SIGNATURE_ARRAY);
  bool d2 = (name2[0] == JVM_SIGNATURE_ARRAY);
  if (d1 && !d2) {
    return -1;
  } else if (d2 && !d1) {
    return 1;
  } else {
    return strcmp(name1, name2);
  }
}

const char* KlassInfoEntry::name() const {
  const char* name;
  if (_klass->name() != nullptr) {
    name = _klass->external_name();
  } else {
    if (_klass == Universe::boolArrayKlassObj())         name = "<boolArrayKlass>";         else
    if (_klass == Universe::charArrayKlassObj())         name = "<charArrayKlass>";         else
    if (_klass == Universe::floatArrayKlassObj())        name = "<floatArrayKlass>";        else
    if (_klass == Universe::doubleArrayKlassObj())       name = "<doubleArrayKlass>";       else
    if (_klass == Universe::byteArrayKlassObj())         name = "<byteArrayKlass>";         else
    if (_klass == Universe::shortArrayKlassObj())        name = "<shortArrayKlass>";        else
    if (_klass == Universe::intArrayKlassObj())          name = "<intArrayKlass>";          else
    if (_klass == Universe::longArrayKlassObj())         name = "<longArrayKlass>";         else
      name = "<no name>";
  }
  return name;
}

void KlassInfoEntry::print_on(outputStream* st) const {
  ResourceMark rm;

  // simplify the formatting (ILP32 vs LP64) - always cast the numbers to 64-bit
  ModuleEntry* module = _klass->module();
  if (module->is_named()) {
    st->print_cr(INT64_FORMAT_W(13) "  " UINT64_FORMAT_W(13) "  %s (%s%s%s)",
                 (int64_t)_instance_count,
                 (uint64_t)_instance_words * HeapWordSize,
                 name(),
                 module->name()->as_C_string(),
                 module->version() != nullptr ? "@" : "",
                 module->version() != nullptr ? module->version()->as_C_string() : "");
  } else {
    st->print_cr(INT64_FORMAT_W(13) "  " UINT64_FORMAT_W(13) "  %s",
                 (int64_t)_instance_count,
                 (uint64_t)_instance_words * HeapWordSize,
                 name());
  }
}

KlassInfoEntry* KlassInfoBucket::lookup(Klass* const k) {
  // Can happen if k is an archived class that we haven't loaded yet.
  if (k->java_mirror_no_keepalive() == nullptr) {
    return nullptr;
  }

  KlassInfoEntry* elt = _list;
  while (elt != nullptr) {
    if (elt->is_equal(k)) {
      return elt;
    }
    elt = elt->next();
  }
  elt = new (std::nothrow) KlassInfoEntry(k, list());
  // We may be out of space to allocate the new entry.
  if (elt != nullptr) {
    set_list(elt);
  }
  return elt;
}

void KlassInfoBucket::iterate(KlassInfoClosure* cic) {
  KlassInfoEntry* elt = _list;
  while (elt != nullptr) {
    cic->do_cinfo(elt);
    elt = elt->next();
  }
}

void KlassInfoBucket::empty() {
  KlassInfoEntry* elt = _list;
  _list = nullptr;
  while (elt != nullptr) {
    KlassInfoEntry* next = elt->next();
    delete elt;
    elt = next;
  }
}

class KlassInfoTable::AllClassesFinder : public LockedClassesDo {
  KlassInfoTable *_table;
public:
  AllClassesFinder(KlassInfoTable* table) : _table(table) {}
  virtual void do_klass(Klass* k) {
    // This has the SIDE EFFECT of creating a KlassInfoEntry
    // for <k>, if one doesn't exist yet.
    _table->lookup(k);
  }
};


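// Build the table of KlassInfoEntry buckets. The bucket array is allocated with
// RETURN_NULL, so a null _buckets signals C-heap exhaustion; callers are expected
// to check allocation_failed() before using the table. When add_all_classes is
// true, every class currently in the ClassLoaderDataGraph is registered up front.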
KlassInfoTable::KlassInfoTable(bool add_all_classes) {
  _size_of_instances_in_words = 0;
  _ref = (HeapWord*) Universe::boolArrayKlassObj();
  _buckets =
    (KlassInfoBucket*)  AllocateHeap(sizeof(KlassInfoBucket) * _num_buckets,
       mtInternal, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
  if (_buckets != nullptr) {
    for (int index = 0; index < _num_buckets; index++) {
      _buckets[index].initialize();
    }
    if (add_all_classes) {
      AllClassesFinder finder(this);
      ClassLoaderDataGraph::classes_do(&finder);
    }
  }
}

KlassInfoTable::~KlassInfoTable() {
  if (_buckets != nullptr) {
    for (int index = 0; index < _num_buckets; index++) {
      _buckets[index].empty();
    }
    FREE_C_HEAP_ARRAY(KlassInfoBucket, _buckets);
    _buckets = nullptr;
  }
}

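// Map a Klass pointer to a hash value: take its distance from a fixed reference
// point (_ref) and shift away the low bits that are always zero due to pointer
// alignment.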
uint KlassInfoTable::hash(const Klass* p) {
  return (uint)(((uintptr_t)p - (uintptr_t)_ref) >> 2);
}

KlassInfoEntry* KlassInfoTable::lookup(Klass* k) {
  uint         idx = hash(k) % _num_buckets;
  assert(_buckets != nullptr, "Allocation failure should have been caught");
  KlassInfoEntry*  e   = _buckets[idx].lookup(k);
  // Lookup may fail if this is a new klass for which we
  // could not allocate space for a new entry, or if it's
  // an archived class that we haven't loaded yet.
  assert(e == nullptr || k == e->klass(), "must be equal");
  return e;
}

// Return false if the entry could not be recorded on account
// of running out of space required to create a new entry.
bool KlassInfoTable::record_instance(const oop obj) {
  Klass*        k = obj->klass();
  KlassInfoEntry* elt = lookup(k);
  // elt may be null if it's a new klass for which we
  // could not allocate space for a new entry in the hashtable.
  if (elt != nullptr) {
    elt->set_count(elt->count() + 1);
    elt->set_words(elt->words() + obj->size());
    _size_of_instances_in_words += obj->size();
    return true;
  } else {
    return false;
  }
}

void KlassInfoTable::iterate(KlassInfoClosure* cic) {
  assert(_buckets != nullptr, "Allocation failure should have been caught");
  for (int index = 0; index < _num_buckets; index++) {
    _buckets[index].iterate(cic);
  }
}

size_t KlassInfoTable::size_of_instances_in_words() const {
  return _size_of_instances_in_words;
}

// Return false if the entry could not be merged on account
// of running out of space required to create a new entry.
bool KlassInfoTable::merge_entry(const KlassInfoEntry* cie) {
  Klass*          k = cie->klass();
  KlassInfoEntry* elt = lookup(k);
  // elt may be null if it's a new klass for which we
  // could not allocate space for a new entry in the hashtable.
  if (elt != nullptr) {
    elt->set_count(elt->count() + cie->count());
    elt->set_words(elt->words() + cie->words());
    _size_of_instances_in_words += cie->words();
    return true;
  }
  return false;
}

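// Folds every entry of a source KlassInfoTable into a destination table,
// remembering whether any individual merge failed for lack of memory.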
class KlassInfoTableMergeClosure : public KlassInfoClosure {
private:
  KlassInfoTable* _dest;
  bool _success;
public:
  KlassInfoTableMergeClosure(KlassInfoTable* table) : _dest(table), _success(true) {}
  void do_cinfo(KlassInfoEntry* cie) {
    _success &= _dest->merge_entry(cie);
  }
  bool success() { return _success; }
};

// Merge all entries from the given table into this one.
bool KlassInfoTable::merge(KlassInfoTable* table) {
  KlassInfoTableMergeClosure closure(this);
  table->iterate(&closure);
  return closure.success();
}

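// Comparator adapter for GrowableArray::sort(); see KlassInfoEntry::compare for
// the ordering (descending footprint, then array classes, then name).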
int KlassInfoHisto::sort_helper(KlassInfoEntry** e1, KlassInfoEntry** e2) {
  return (*e1)->compare(*e1, *e2);
}

KlassInfoHisto::KlassInfoHisto(KlassInfoTable* cit) :
  _cit(cit) {
  _elements = new (mtServiceability) GrowableArray<KlassInfoEntry*>(_histo_initial_size, mtServiceability);
}

KlassInfoHisto::~KlassInfoHisto() {
  delete _elements;
}

void KlassInfoHisto::add(KlassInfoEntry* cie) {
  elements()->append(cie);
}

void KlassInfoHisto::sort() {
  elements()->sort(KlassInfoHisto::sort_helper);
}

void KlassInfoHisto::print_elements(outputStream* st) const {
  // simplify the formatting (ILP32 vs LP64) - store the sum in 64-bit
  int64_t total = 0;
  uint64_t totalw = 0;
  for (int i = 0; i < elements()->length(); i++) {
    st->print("%4d: ", i + 1);
    elements()->at(i)->print_on(st);
    total += elements()->at(i)->count();
    totalw += elements()->at(i)->words();
  }
  st->print_cr("Total " INT64_FORMAT_W(13) "  " UINT64_FORMAT_W(13),
               total, totalw * HeapWordSize);
}

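// Collects the KlassInfoEntry of every instance class (array classes are
// ignored) so that the class hierarchy can be built from them.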
class HierarchyClosure : public KlassInfoClosure {
private:
  GrowableArray<KlassInfoEntry*> *_elements;
public:
  HierarchyClosure(GrowableArray<KlassInfoEntry*> *_elements) : _elements(_elements) {}

  void do_cinfo(KlassInfoEntry* cie) {
    // ignore array classes
    if (cie->klass()->is_instance_klass()) {
      _elements->append(cie);
    }
  }
};

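// Print the class hierarchy, rooted at java.lang.Object, to the given stream.
// If classname is non-null, only that class, its superclasses, and (optionally)
// its subclasses are printed.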
void KlassHierarchy::print_class_hierarchy(outputStream* st, bool print_interfaces,
                                           bool print_subclasses, char* classname) {
  ResourceMark rm;
  Stack <KlassInfoEntry*, mtClass> class_stack;
  GrowableArray<KlassInfoEntry*> elements;

  // Add all classes to the KlassInfoTable, which allows for quick lookup.
  // A KlassInfoEntry will be created for each class.
  KlassInfoTable cit(true);
  if (cit.allocation_failed()) {
    st->print_cr("ERROR: Ran out of C-heap; hierarchy not generated");
    return;
  }

  // Add all created KlassInfoEntry instances to the elements array for easy
  // iteration, and to allow each KlassInfoEntry instance to have a unique index.
  HierarchyClosure hc(&elements);
  cit.iterate(&hc);

  for (int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    Klass* super = cie->klass()->super();

    // Set the index for the class.
    cie->set_index(i + 1);

    // Add the class to the subclass array of its superclass.
    if (super != nullptr) {
      KlassInfoEntry* super_cie = cit.lookup(super);
      assert(super_cie != nullptr, "could not lookup superclass");
      super_cie->add_subclass(cie);
    }
  }

  // Set the do_print flag for each class that should be printed.
  for (int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    if (classname == nullptr) {
      // We are printing all classes.
      cie->set_do_print(true);
    } else {
      // We are only printing the hierarchy of a specific class.
      if (strcmp(classname, cie->klass()->external_name()) == 0) {
        KlassHierarchy::set_do_print_for_class_hierarchy(cie, &cit, print_subclasses);
      }
    }
  }

  // Now we do a depth-first traversal of the class hierarchy. The class_stack will
  // maintain the list of classes we still need to process. Start things off
  // by priming it with java.lang.Object.
  KlassInfoEntry* jlo_cie = cit.lookup(vmClasses::Object_klass());
  assert(jlo_cie != nullptr, "could not lookup java.lang.Object");
  class_stack.push(jlo_cie);

  // Repeatedly pop the top item off the stack, print its class info,
  // and push all of its subclasses on to the stack. Do this until there
  // are no classes left on the stack.
  while (!class_stack.is_empty()) {
    KlassInfoEntry* curr_cie = class_stack.pop();
    if (curr_cie->do_print()) {
      print_class(st, curr_cie, print_interfaces);
      if (curr_cie->subclasses() != nullptr) {
        // Current class has subclasses, so push all of them onto the stack.
        for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
          KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
          if (cie->do_print()) {
            class_stack.push(cie);
          }
        }
      }
    }
  }

  st->flush();
}

// Sets the do_print flag for every superclass and subclass of the specified class.
void KlassHierarchy::set_do_print_for_class_hierarchy(KlassInfoEntry* cie, KlassInfoTable* cit,
                                                      bool print_subclasses) {
  // Set do_print for all superclasses of this class.
  Klass* super = ((InstanceKlass*)cie->klass())->java_super();
  while (super != nullptr) {
    KlassInfoEntry* super_cie = cit->lookup(super);
    super_cie->set_do_print(true);
    super = super->super();
  }

  // Set do_print for this class and all of its subclasses.
  Stack <KlassInfoEntry*, mtClass> class_stack;
  class_stack.push(cie);
  while (!class_stack.is_empty()) {
    KlassInfoEntry* curr_cie = class_stack.pop();
    curr_cie->set_do_print(true);
    if (print_subclasses && curr_cie->subclasses() != nullptr) {
      // Current class has subclasses, so push all of them onto the stack.
      for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
        KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
        class_stack.push(cie);
      }
    }
  }
}

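// Print one '|' per level of nesting, separated by two spaces, to draw the tree
// structure of the hierarchy.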
static void print_indent(outputStream* st, int indent) {
  while (indent != 0) {
    st->print("|");
    indent--;
    if (indent != 0) {
      st->print("  ");
    }
  }
}

// Print the class name and its unique ClassLoader identifier.
static void print_classname(outputStream* st, Klass* klass) {
  oop loader_oop = klass->class_loader_data()->class_loader();
  st->print("%s/", klass->external_name());
  if (loader_oop == nullptr) {
    st->print("null");
  } else {
    st->print(PTR_FORMAT, p2i(klass->class_loader_data()));
  }
}

static void print_interface(outputStream* st, InstanceKlass* intf_klass, const char* intf_type, int indent) {
  print_indent(st, indent);
  st->print("  implements ");
  print_classname(st, intf_klass);
  st->print(" (%s intf)\n", intf_type);
}

void KlassHierarchy::print_class(outputStream* st, KlassInfoEntry* cie, bool print_interfaces) {
  ResourceMark rm;
  InstanceKlass* klass = (InstanceKlass*)cie->klass();
  int indent = 0;

  // Print indentation based on the depth of the superclass chain.
  Klass* super = klass->super();
  while (super != nullptr) {
    super = super->super();
    indent++;
  }
  print_indent(st, indent);
  if (indent != 0) st->print("--");

  // Print the class name, its unique ClassLoader identifier, and whether it is an interface.
  print_classname(st, klass);
  if (klass->is_interface()) {
    st->print(" (intf)");
  }
  st->print("\n");

  // Print any interfaces the class has.
  if (print_interfaces) {
    Array<InstanceKlass*>* local_intfs = klass->local_interfaces();
    Array<InstanceKlass*>* trans_intfs = klass->transitive_interfaces();
    for (int i = 0; i < local_intfs->length(); i++) {
      print_interface(st, local_intfs->at(i), "declared", indent);
    }
    for (int i = 0; i < trans_intfs->length(); i++) {
      InstanceKlass* trans_interface = trans_intfs->at(i);
      // Only print transitive interfaces if they are not also declared.
      if (!local_intfs->contains(trans_interface)) {
        print_interface(st, trans_interface, "inherited", indent);
      }
    }
  }
}

void KlassInfoHisto::print_histo_on(outputStream* st) {
  st->print_cr(" num     #instances         #bytes  class name (module)");
  st->print_cr("-------------------------------------------------------");
  print_elements(st);
}

class HistoClosure : public KlassInfoClosure {
 private:
  KlassInfoHisto* _cih;
 public:
  HistoClosure(KlassInfoHisto* cih) : _cih(cih) {}

  void do_cinfo(KlassInfoEntry* cie) {
    _cih->add(cie);
  }
};

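// Records every visited object in the KlassInfoTable. Objects rejected by the
// optional filter are skipped; objects that could not be recorded because the
// table ran out of C-heap are tallied in _missed_count.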
class RecordInstanceClosure : public ObjectClosure {
 private:
  KlassInfoTable* _cit;
  uintx _missed_count;
  BoolObjectClosure* _filter;
 public:
  RecordInstanceClosure(KlassInfoTable* cit, BoolObjectClosure* filter) :
    _cit(cit), _missed_count(0), _filter(filter) {}

  void do_object(oop obj) {
    if (should_visit(obj)) {
      if (!_cit->record_instance(obj)) {
        _missed_count++;
      }
    }
  }

  uintx missed_count() { return _missed_count; }

 private:
  bool should_visit(oop obj) {
    return _filter == nullptr || _filter->do_object_b(obj);
  }
};

// Heap inspection work performed by each worker.
// If a native OOM occurs while building the per-worker KlassInfoTable,
// _success is set to false.
void ParHeapInspectTask::work(uint worker_id) {
  uintx missed_count = 0;
  bool merge_success = true;
  if (!Atomic::load(&_success)) {
    // Another worker has already failed during the parallel iteration.
    return;
  }

  KlassInfoTable cit(false);
  if (cit.allocation_failed()) {
    // Failed to allocate memory; stop the parallel iteration.
    Atomic::store(&_success, false);
    return;
  }
  RecordInstanceClosure ric(&cit, _filter);
  _poi->object_iterate(&ric, worker_id);
  missed_count = ric.missed_count();
  {
    MutexLocker x(&_mutex, Mutex::_no_safepoint_check_flag);
    merge_success = _shared_cit->merge(&cit);
  }
  if (merge_success) {
    Atomic::add(&_missed_count, missed_count);
  } else {
    Atomic::store(&_success, false);
  }
}

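// Fill the KlassInfoTable by walking the heap: in parallel when a WorkerThreads
// pool is supplied, falling back to serial iteration if the parallel attempt
// fails. Returns the number of instances that could not be recorded.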
uintx HeapInspection::populate_table(KlassInfoTable* cit, BoolObjectClosure *filter, WorkerThreads* workers) {
  // Try parallel first.
  if (workers != nullptr) {
    ResourceMark rm;
    ParallelObjectIterator poi(workers->active_workers());
    ParHeapInspectTask task(&poi, cit, filter);
    // Run task with the active workers.
    workers->run_task(&task);
    if (task.success()) {
      return task.missed_count();
    }
  }

  ResourceMark rm;
  // If no parallel iteration available, run serially.
  RecordInstanceClosure ric(cit, filter);
  Universe::heap()->object_iterate(&ric);
  return ric.missed_count();
}

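// Print a histogram of all objects in the Java heap (instance counts and byte
// totals per class) to the given stream.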
void HeapInspection::heap_inspection(outputStream* st, WorkerThreads* workers) {
  ResourceMark rm;

  KlassInfoTable cit(false);
  if (!cit.allocation_failed()) {
    // populate table with object allocation info
    uintx missed_count = populate_table(&cit, nullptr, workers);
    if (missed_count != 0) {
      log_info(gc, classhisto)("WARNING: Ran out of C-heap; undercounted " UINTX_FORMAT
                               " total instances in data below",
                               missed_count);
    }

    // Sort and print klass instance info
    KlassInfoHisto histo(&cit);
    HistoClosure hc(&histo);

    cit.iterate(&hc);

    histo.sort();
    histo.print_histo_on(st);
  } else {
    st->print_cr("ERROR: Ran out of C-heap; histogram not generated");
  }
  st->flush();
}

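// Collects every object that is an instance of the given Klass (or one of its
// subclasses) into the supplied GrowableArray, keeping each object alive as it
// is published.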
class FindInstanceClosure : public ObjectClosure {
 private:
  Klass* _klass;
  GrowableArray<oop>* _result;

 public:
  FindInstanceClosure(Klass* k, GrowableArray<oop>* result) : _klass(k), _result(result) {}

  void do_object(oop obj) {
    if (obj->is_a(_klass)) {
      // obj was read with AS_NO_KEEPALIVE, or equivalent.
      // The object needs to be kept alive when it is published.
      Universe::heap()->keep_alive(obj);

      _result->append(obj);
    }
  }
};

void HeapInspection::find_instances_at_safepoint(Klass* k, GrowableArray<oop>* result) {
  assert(SafepointSynchronize::is_at_safepoint(), "all threads are stopped");
  assert(Heap_lock->is_locked(), "should have the Heap_lock");

  // Ensure that the heap is parsable
  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

  // Iterate over objects in the heap
  FindInstanceClosure fic(k, result);
  Universe::heap()->object_iterate(&fic);
}