1 /*
2 * Copyright (c) 2002, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classLoaderData.inline.hpp"
26 #include "classfile/classLoaderDataGraph.hpp"
27 #include "classfile/moduleEntry.hpp"
28 #include "classfile/vmClasses.hpp"
29 #include "gc/shared/collectedHeap.hpp"
30 #include "logging/log.hpp"
31 #include "logging/logTag.hpp"
32 #include "memory/heapInspection.hpp"
33 #include "memory/resourceArea.hpp"
34 #include "memory/universe.hpp"
35 #include "nmt/memTracker.hpp"
36 #include "oops/oop.inline.hpp"
37 #include "runtime/atomicAccess.hpp"
38 #include "runtime/os.hpp"
39 #include "utilities/globalDefinitions.hpp"
40 #include "utilities/macros.hpp"
41 #include "utilities/stack.inline.hpp"
42
43 // HeapInspection
44
45 inline KlassInfoEntry::~KlassInfoEntry() {
46 if (_subclasses != nullptr) {
47 delete _subclasses;
48 }
49 }
50
51 inline void KlassInfoEntry::add_subclass(KlassInfoEntry* cie) {
52 if (_subclasses == nullptr) {
53 _subclasses = new (mtServiceability) GrowableArray<KlassInfoEntry*>(4, mtServiceability);
54 }
55 _subclasses->append(cie);
56 }
57
58 int KlassInfoEntry::compare(KlassInfoEntry* e1, KlassInfoEntry* e2) {
59 if(e1->_instance_words > e2->_instance_words) {
60 return -1;
61 } else if(e1->_instance_words < e2->_instance_words) {
62 return 1;
63 }
64 // Sort alphabetically, note 'Z' < '[' < 'a', but it's better to group
65 // the array classes before all the instance classes.
66 ResourceMark rm;
67 const char* name1 = e1->klass()->external_name();
68 const char* name2 = e2->klass()->external_name();
69 bool d1 = (name1[0] == JVM_SIGNATURE_ARRAY);
70 bool d2 = (name2[0] == JVM_SIGNATURE_ARRAY);
71 if (d1 && !d2) {
72 return -1;
73 } else if (d2 && !d1) {
74 return 1;
75 } else {
76 return strcmp(name1, name2);
77 }
78 }
79
80 const char* KlassInfoEntry::name() const {
81 const char* name;
82 if (_klass->name() != nullptr) {
83 name = _klass->external_name();
84 } else {
85 if (_klass == Universe::boolArrayKlass()) name = "<boolArrayKlass>"; else
86 if (_klass == Universe::charArrayKlass()) name = "<charArrayKlass>"; else
87 if (_klass == Universe::floatArrayKlass()) name = "<floatArrayKlass>"; else
88 if (_klass == Universe::doubleArrayKlass()) name = "<doubleArrayKlass>"; else
89 if (_klass == Universe::byteArrayKlass()) name = "<byteArrayKlass>"; else
90 if (_klass == Universe::shortArrayKlass()) name = "<shortArrayKlass>"; else
91 if (_klass == Universe::intArrayKlass()) name = "<intArrayKlass>"; else
92 if (_klass == Universe::longArrayKlass()) name = "<longArrayKlass>"; else
93 name = "<no name>";
94 }
95 return name;
96 }
97
// Prints one histogram row: instance count, total bytes, class name, and
// (for classes in a named module) the module name and optional version.
void KlassInfoEntry::print_on(outputStream* st) const {
  ResourceMark rm;

  // simplify the formatting (ILP32 vs LP64) - always cast the numbers to 64-bit
  ModuleEntry* module = _klass->module();
  if (module->is_named()) {
    // Named module: append " (module@version)"; the "@version" part is
    // omitted when the module has no version.
    st->print_cr(INT64_FORMAT_W(13) " " UINT64_FORMAT_W(13) " %s (%s%s%s)",
                 (int64_t)_instance_count,
                 (uint64_t)_instance_words * HeapWordSize,
                 name(),
                 module->name()->as_C_string(),
                 module->version() != nullptr ? "@" : "",
                 module->version() != nullptr ? module->version()->as_C_string() : "");
  } else {
    st->print_cr(INT64_FORMAT_W(13) " " UINT64_FORMAT_W(13) " %s",
                 (int64_t)_instance_count,
                 (uint64_t)_instance_words * HeapWordSize,
                 name());
  }
}
118
// Finds the entry for k on this bucket's chain, creating one if absent.
// Returns nullptr if k has no mirror (unloaded archived class) or if the
// C-heap allocation for a new entry fails.
KlassInfoEntry* KlassInfoBucket::lookup(Klass* const k) {
  // Can happen if k is an archived class that we haven't loaded yet.
  if (k->java_mirror_no_keepalive() == nullptr) {
    return nullptr;
  }

  // Linear search of the bucket's chain for an existing entry.
  KlassInfoEntry* elt = _list;
  while (elt != nullptr) {
    if (elt->is_equal(k)) {
      return elt;
    }
    elt = elt->next();
  }
  // Not found: allocate a new entry linked ahead of the current chain head.
  // std::nothrow makes an allocation failure yield nullptr instead of
  // aborting the VM; callers treat a null result as "could not record".
  elt = new (std::nothrow) KlassInfoEntry(k, list());
  // We may be out of space to allocate the new entry.
  if (elt != nullptr) {
    set_list(elt);
  }
  return elt;
}
139
140 void KlassInfoBucket::iterate(KlassInfoClosure* cic) {
141 KlassInfoEntry* elt = _list;
142 while (elt != nullptr) {
143 cic->do_cinfo(elt);
144 elt = elt->next();
145 }
146 }
147
148 void KlassInfoBucket::empty() {
149 KlassInfoEntry* elt = _list;
150 _list = nullptr;
151 while (elt != nullptr) {
152 KlassInfoEntry* next = elt->next();
153 delete elt;
154 elt = next;
155 }
156 }
157
// Closure that visits every loaded class (under the ClassLoaderDataGraph
// lock, per LockedClassesDo) and forces a table entry to exist for each.
class KlassInfoTable::AllClassesFinder : public LockedClassesDo {
  KlassInfoTable *_table;
 public:
  AllClassesFinder(KlassInfoTable* table) : _table(table) {}
  virtual void do_klass(Klass* k) {
    // This has the SIDE EFFECT of creating a KlassInfoEntry
    // for <k>, if one doesn't exist yet. The result is ignored;
    // allocation failures surface later via null lookups.
    _table->lookup(k);
  }
};
168
169
// Builds the hashtable of per-class entries. If add_all_classes is true,
// an entry is pre-created for every currently loaded class. On C-heap
// exhaustion _buckets stays null; callers must check allocation_failed().
KlassInfoTable::KlassInfoTable(bool add_all_classes) {
  _size_of_instances_in_words = 0;
  // Use a well-known klass address as the hash base so bucket indices are
  // small offsets for klasses allocated near it (see hash()).
  _ref = (uintptr_t) Universe::boolArrayKlass();
  // RETURN_NULL: allocation failure yields nullptr instead of aborting the VM.
  _buckets =
    (KlassInfoBucket*) AllocateHeap(sizeof(KlassInfoBucket) * _num_buckets,
                                    mtInternal, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
  if (_buckets != nullptr) {
    for (int index = 0; index < _num_buckets; index++) {
      _buckets[index].initialize();
    }
    if (add_all_classes) {
      // Pre-populate: create a KlassInfoEntry for every loaded class.
      AllClassesFinder finder(this);
      ClassLoaderDataGraph::classes_do(&finder);
    }
  }
}
186
187 KlassInfoTable::~KlassInfoTable() {
188 if (_buckets != nullptr) {
189 for (int index = 0; index < _num_buckets; index++) {
190 _buckets[index].empty();
191 }
192 FREE_C_HEAP_ARRAY(KlassInfoBucket, _buckets);
193 _buckets = nullptr;
194 }
195 }
196
// Hashes a klass pointer by its offset from the _ref base address.
// Klass pointers are at least 4-byte aligned, so the low two bits carry
// no information and are shifted out.
uint KlassInfoTable::hash(const Klass* p) {
  return (uint)(((uintptr_t)p - _ref) >> 2);
}
200
// Returns the (possibly freshly created) entry for k, or nullptr when the
// entry could not be created; see KlassInfoBucket::lookup().
KlassInfoEntry* KlassInfoTable::lookup(Klass* k) {
  uint idx = hash(k) % _num_buckets;
  assert(_buckets != nullptr, "Allocation failure should have been caught");
  KlassInfoEntry* e = _buckets[idx].lookup(k);
  // Lookup may fail if this is a new klass for which we
  // could not allocate space for a new entry, or if it's
  // an archived class that we haven't loaded yet.
  assert(e == nullptr || k == e->klass(), "must be equal");
  return e;
}
211
212 // Return false if the entry could not be recorded on account
213 // of running out of space required to create a new entry.
214 bool KlassInfoTable::record_instance(const oop obj) {
215 Klass* k = obj->klass();
216 KlassInfoEntry* elt = lookup(k);
217 // elt may be null if it's a new klass for which we
218 // could not allocate space for a new entry in the hashtable.
219 if (elt != nullptr) {
220 elt->set_count(elt->count() + 1);
221 elt->set_words(elt->words() + obj->size());
222 _size_of_instances_in_words += obj->size();
223 return true;
224 } else {
225 return false;
226 }
227 }
228
229 void KlassInfoTable::iterate(KlassInfoClosure* cic) {
230 assert(_buckets != nullptr, "Allocation failure should have been caught");
231 for (int index = 0; index < _num_buckets; index++) {
232 _buckets[index].iterate(cic);
233 }
234 }
235
236 size_t KlassInfoTable::size_of_instances_in_words() const {
237 return _size_of_instances_in_words;
238 }
239
240 // Return false if the entry could not be recorded on account
241 // of running out of space required to create a new entry.
242 bool KlassInfoTable::merge_entry(const KlassInfoEntry* cie) {
243 Klass* k = cie->klass();
244 KlassInfoEntry* elt = lookup(k);
245 // elt may be null if it's a new klass for which we
246 // could not allocate space for a new entry in the hashtable.
247 if (elt != nullptr) {
248 elt->set_count(elt->count() + cie->count());
249 elt->set_words(elt->words() + cie->words());
250 _size_of_instances_in_words += cie->words();
251 return true;
252 }
253 return false;
254 }
255
256 class KlassInfoTableMergeClosure : public KlassInfoClosure {
257 private:
258 KlassInfoTable* _dest;
259 bool _success;
260 public:
261 KlassInfoTableMergeClosure(KlassInfoTable* table) : _dest(table), _success(true) {}
262 void do_cinfo(KlassInfoEntry* cie) {
263 _success &= _dest->merge_entry(cie);
264 }
265 bool success() { return _success; }
266 };
267
268 // merge from table
269 bool KlassInfoTable::merge(KlassInfoTable* table) {
270 KlassInfoTableMergeClosure closure(this);
271 table->iterate(&closure);
272 return closure.success();
273 }
274
275 int KlassInfoHisto::sort_helper(KlassInfoEntry** e1, KlassInfoEntry** e2) {
276 return (*e1)->compare(*e1,*e2);
277 }
278
279 KlassInfoHisto::KlassInfoHisto(KlassInfoTable* cit) :
280 _cit(cit) {
281 _elements = new (mtServiceability) GrowableArray<KlassInfoEntry*>(_histo_initial_size, mtServiceability);
282 }
283
284 KlassInfoHisto::~KlassInfoHisto() {
285 delete _elements;
286 }
287
288 void KlassInfoHisto::add(KlassInfoEntry* cie) {
289 elements()->append(cie);
290 }
291
292 void KlassInfoHisto::sort() {
293 elements()->sort(KlassInfoHisto::sort_helper);
294 }
295
// Prints every histogram row (numbered from 1, matching classic
// jmap -histo output) followed by a totals line.
void KlassInfoHisto::print_elements(outputStream* st) const {
  // simplify the formatting (ILP32 vs LP64) - store the sum in 64-bit
  int64_t total = 0;
  uint64_t totalw = 0;
  for(int i=0; i < elements()->length(); i++) {
    st->print("%4d: ", i+1);
    elements()->at(i)->print_on(st);
    total += elements()->at(i)->count();
    totalw += elements()->at(i)->words();
  }
  // Totals: instance count and bytes (words scaled by HeapWordSize).
  st->print_cr("Total " INT64_FORMAT_W(13) " " UINT64_FORMAT_W(13),
               total, totalw * HeapWordSize);
}
309
310 class HierarchyClosure : public KlassInfoClosure {
311 private:
312 GrowableArray<KlassInfoEntry*> *_elements;
313 public:
314 HierarchyClosure(GrowableArray<KlassInfoEntry*> *_elements) : _elements(_elements) {}
315
316 void do_cinfo(KlassInfoEntry* cie) {
317 // ignore array classes
318 if (cie->klass()->is_instance_klass()) {
319 _elements->append(cie);
320 }
321 }
322 };
323
// Prints the class hierarchy rooted at java.lang.Object, or (when
// `classname` is non-null) only the hierarchy around that one class.
// `print_interfaces` adds each class's declared/inherited interfaces;
// `print_subclasses` controls whether a named class's subtree is included.
void KlassHierarchy::print_class_hierarchy(outputStream* st, bool print_interfaces,
                                           bool print_subclasses, char* classname) {
  ResourceMark rm;
  Stack <KlassInfoEntry*, mtClass> class_stack;
  GrowableArray<KlassInfoEntry*> elements;

  // Add all classes to the KlassInfoTable, which allows for quick lookup.
  // A KlassInfoEntry will be created for each class.
  KlassInfoTable cit(true);
  if (cit.allocation_failed()) {
    st->print_cr("ERROR: Ran out of C-heap; hierarchy not generated");
    return;
  }

  // Add all created KlassInfoEntry instances to the elements array for easy
  // iteration, and to allow each KlassInfoEntry instance to have a unique index.
  HierarchyClosure hc(&elements);
  cit.iterate(&hc);

  // Wire up the subclass lists: each entry is appended to its superclass's
  // entry, and receives a 1-based index.
  for(int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    Klass* super = cie->klass()->super();

    // Set the index for the class.
    cie->set_index(i + 1);

    // Add the class to the subclass array of its superclass.
    if (super != nullptr) {
      KlassInfoEntry* super_cie = cit.lookup(super);
      assert(super_cie != nullptr, "could not lookup superclass");
      super_cie->add_subclass(cie);
    }
  }

  // Set the do_print flag for each class that should be printed.
  for(int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    if (classname == nullptr) {
      // We are printing all classes.
      cie->set_do_print(true);
    } else {
      // We are only printing the hierarchy of a specific class.
      if (strcmp(classname, cie->klass()->external_name()) == 0) {
        assert(cie->klass()->is_instance_klass(), "elements array contains only instance klasses");
        KlassHierarchy::set_do_print_for_class_hierarchy(cie, &cit, print_subclasses);
      }
    }
  }

  // Now we do a depth first traversal of the class hierarchy. The class_stack will
  // maintain the list of classes we still need to process. Start things off
  // by priming it with java.lang.Object.
  KlassInfoEntry* jlo_cie = cit.lookup(vmClasses::Object_klass());
  assert(jlo_cie != nullptr, "could not lookup java.lang.Object");
  class_stack.push(jlo_cie);

  // Repeatedly pop the top item off the stack, print its class info,
  // and push all of its subclasses on to the stack. Do this until there
  // are no classes left on the stack.
  while (!class_stack.is_empty()) {
    KlassInfoEntry* curr_cie = class_stack.pop();
    if (curr_cie->do_print()) {
      print_class(st, curr_cie, print_interfaces);
      if (curr_cie->subclasses() != nullptr) {
        // Current class has subclasses, so push all of them onto the stack.
        for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
          KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
          if (cie->do_print()) {
            class_stack.push(cie);
          }
        }
      }
    }
  }

  st->flush();
}
401
// Sets the do_print flag for every superclass and subclass of the specified class.
// Superclasses are always marked; the subtree rooted at `cie` is marked only
// when print_subclasses is true (the class itself is always marked).
void KlassHierarchy::set_do_print_for_class_hierarchy(KlassInfoEntry* cie, KlassInfoTable* cit,
                                                      bool print_subclasses) {
  // Set do_print for all superclasses of this class.
  InstanceKlass* super = InstanceKlass::cast(cie->klass())->super();
  while (super != nullptr) {
    // NOTE(review): unlike the caller, this lookup result is dereferenced
    // without a null assert — presumably every superclass is already in the
    // table; confirm against KlassInfoTable construction.
    KlassInfoEntry* super_cie = cit->lookup(super);
    super_cie->set_do_print(true);
    super = super->super();
  }

  // Set do_print for this class and all of its subclasses.
  // Iterative depth-first walk over the subclass lists.
  Stack <KlassInfoEntry*, mtClass> class_stack;
  class_stack.push(cie);
  while (!class_stack.is_empty()) {
    KlassInfoEntry* curr_cie = class_stack.pop();
    curr_cie->set_do_print(true);
    if (print_subclasses && curr_cie->subclasses() != nullptr) {
      // Current class has subclasses, so push all of them onto the stack.
      for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
        KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
        class_stack.push(cie);
      }
    }
  }
}
428
429 static void print_indent(outputStream* st, int indent) {
430 while (indent != 0) {
431 st->print("|");
432 indent--;
433 if (indent != 0) {
434 st->print(" ");
435 }
436 }
437 }
438
439 // Print the class name and its unique ClassLoader identifier.
440 static void print_classname(outputStream* st, Klass* klass) {
441 oop loader_oop = klass->class_loader_data()->class_loader();
442 st->print("%s/", klass->external_name());
443 if (loader_oop == nullptr) {
444 st->print("null");
445 } else {
446 st->print(PTR_FORMAT, p2i(klass->class_loader_data()));
447 }
448 }
449
450 static void print_interface(outputStream* st, InstanceKlass* intf_klass, const char* intf_type, int indent) {
451 print_indent(st, indent);
452 st->print(" implements ");
453 print_classname(st, intf_klass);
454 st->print(" (%s intf)\n", intf_type);
455 }
456
// Prints one class line of the hierarchy: indentation proportional to its
// depth in the superclass chain, the class name with loader id, an "(intf)"
// tag for interfaces, and optionally its declared and inherited interfaces.
void KlassHierarchy::print_class(outputStream* st, KlassInfoEntry* cie, bool print_interfaces) {
  ResourceMark rm;
  InstanceKlass* klass = (InstanceKlass*)cie->klass();
  int indent = 0;

  // Print indentation with proper indicators of superclass.
  // Depth is the length of the superclass chain.
  Klass* super = klass->super();
  while (super != nullptr) {
    super = super->super();
    indent++;
  }
  print_indent(st, indent);
  if (indent != 0) st->print("--");

  // Print the class name, its unique ClassLoader identifier, and if it is an interface.
  print_classname(st, klass);
  if (klass->is_interface()) {
    st->print(" (intf)");
  }
  st->print("\n");

  // Print any interfaces the class has.
  if (print_interfaces) {
    Array<InstanceKlass*>* local_intfs = klass->local_interfaces();
    Array<InstanceKlass*>* trans_intfs = klass->transitive_interfaces();
    for (int i = 0; i < local_intfs->length(); i++) {
      print_interface(st, local_intfs->at(i), "declared", indent);
    }
    for (int i = 0; i < trans_intfs->length(); i++) {
      InstanceKlass* trans_interface = trans_intfs->at(i);
      // Only print transitive interfaces if they are not also declared.
      if (!local_intfs->contains(trans_interface)) {
        print_interface(st, trans_interface, "inherited", indent);
      }
    }
  }
}
494
// Prints the histogram header followed by all rows (see print_elements()).
void KlassInfoHisto::print_histo_on(outputStream* st) {
  st->print_cr(" num     #instances         #bytes  class name (module)");
  st->print_cr("-------------------------------------------------------");
  print_elements(st);
}
500
501 class HistoClosure : public KlassInfoClosure {
502 private:
503 KlassInfoHisto* _cih;
504 public:
505 HistoClosure(KlassInfoHisto* cih) : _cih(cih) {}
506
507 void do_cinfo(KlassInfoEntry* cie) {
508 _cih->add(cie);
509 }
510 };
511
512 class RecordInstanceClosure : public ObjectClosure {
513 private:
514 KlassInfoTable* _cit;
515 uintx _missed_count;
516 BoolObjectClosure* _filter;
517 public:
518 RecordInstanceClosure(KlassInfoTable* cit, BoolObjectClosure* filter) :
519 _cit(cit), _missed_count(0), _filter(filter) {}
520
521 void do_object(oop obj) {
522 if (should_visit(obj)) {
523 if (!_cit->record_instance(obj)) {
524 _missed_count++;
525 }
526 }
527 }
528
529 uintx missed_count() { return _missed_count; }
530
531 private:
532 bool should_visit(oop obj) {
533 return _filter == nullptr || _filter->do_object_b(obj);
534 }
535 };
536
// Heap inspection for every worker.
// When native OOM happens for KlassInfoTable, set _success to false.
// Each worker builds a private table over its share of the heap, then
// merges it into the shared table under _mutex.
void ParHeapInspectTask::work(uint worker_id) {
  uintx missed_count = 0;
  bool merge_success = true;
  if (!AtomicAccess::load(&_success)) {
    // other worker has failed on parallel iteration.
    return;
  }

  // Private, per-worker table: no synchronization needed while recording.
  KlassInfoTable cit(false);
  if (cit.allocation_failed()) {
    // fail to allocate memory, stop parallel mode
    AtomicAccess::store(&_success, false);
    return;
  }
  RecordInstanceClosure ric(&cit, _filter);
  _poi->object_iterate(&ric, worker_id);
  missed_count = ric.missed_count();
  {
    // Merging into the shared table is the only cross-worker mutation,
    // so it is the only part done under the lock.
    MutexLocker x(&_mutex, Mutex::_no_safepoint_check_flag);
    merge_success = _shared_cit->merge(&cit);
  }
  if (merge_success) {
    AtomicAccess::add(&_missed_count, missed_count);
  } else {
    AtomicAccess::store(&_success, false);
  }
}
566
// Fills `cit` with per-class counts for every heap object accepted by
// `filter` (null filter accepts everything). Uses the worker threads for a
// parallel walk when available, falling back to a serial walk otherwise.
// Returns the number of objects that could not be recorded.
uintx HeapInspection::populate_table(KlassInfoTable* cit, BoolObjectClosure *filter, WorkerThreads* workers) {
  // Try parallel first.
  if (workers != nullptr) {
    ResourceMark rm;
    ParallelObjectIterator poi(workers->active_workers());
    ParHeapInspectTask task(&poi, cit, filter);
    // Run task with the active workers.
    workers->run_task(&task);
    if (task.success()) {
      return task.missed_count();
    }
    // Parallel inspection failed (e.g. C-heap exhaustion in a worker);
    // fall through to the serial path.
  }

  ResourceMark rm;
  // If no parallel iteration available, run serially.
  RecordInstanceClosure ric(cit, filter);
  Universe::heap()->object_iterate(&ric);
  return ric.missed_count();
}
586
// Entry point for the class histogram (jmap -histo / jcmd GC.class_histogram):
// populates a table over the whole heap, then sorts and prints it to `st`.
void HeapInspection::heap_inspection(outputStream* st, WorkerThreads* workers) {
  ResourceMark rm;

  KlassInfoTable cit(false);
  if (!cit.allocation_failed()) {
    // populate table with object allocation info
    uintx missed_count = populate_table(&cit, nullptr, workers);
    if (missed_count != 0) {
      // Best effort: report partial results with a warning rather than
      // failing outright.
      log_info(gc, classhisto)("WARNING: Ran out of C-heap; undercounted %zu"
                               " total instances in data below",
                               missed_count);
    }

    // Sort and print klass instance info
    KlassInfoHisto histo(&cit);
    HistoClosure hc(&histo);

    cit.iterate(&hc);

    histo.sort();
    histo.print_histo_on(st);
  } else {
    st->print_cr("ERROR: Ran out of C-heap; histogram not generated");
  }
  st->flush();
}
613
// Collects every heap object that is an instance of a given klass
// (including subclasses, per oop::is_a) into a result array.
class FindInstanceClosure : public ObjectClosure {
 private:
  Klass* _klass;
  GrowableArray<oop>* _result;

 public:
  FindInstanceClosure(Klass* k, GrowableArray<oop>* result) : _klass(k), _result(result) {};

  void do_object(oop obj) {
    if (obj->is_a(_klass)) {
      // obj was read with AS_NO_KEEPALIVE, or equivalent.
      // The object needs to be kept alive when it is published.
      Universe::heap()->keep_alive(obj);

      _result->append(obj);
    }
  }
};
632
// Appends all live instances of klass `k` (and its subclasses) to `result`.
// Must run at a safepoint with the Heap_lock held so the heap cannot
// change during the walk.
void HeapInspection::find_instances_at_safepoint(Klass* k, GrowableArray<oop>* result) {
  assert(SafepointSynchronize::is_at_safepoint(), "all threads are stopped");
  assert(Heap_lock->is_locked(), "should have the Heap_lock");

  // Ensure that the heap is parsable
  Universe::heap()->ensure_parsability(false); // no need to retire TLABs

  // Iterate over objects in the heap
  FindInstanceClosure fic(k, result);
  Universe::heap()->object_iterate(&fic);
}