1 /*
2 * Copyright (c) 2003, 2026, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classLoaderDataGraph.hpp"
26 #include "classfile/javaClasses.inline.hpp"
27 #include "classfile/symbolTable.hpp"
28 #include "classfile/vmClasses.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "gc/shared/collectedHeap.hpp"
31 #include "jvmtifiles/jvmtiEnv.hpp"
32 #include "logging/log.hpp"
33 #include "memory/allocation.inline.hpp"
34 #include "memory/resourceArea.hpp"
35 #include "memory/universe.hpp"
36 #include "oops/access.inline.hpp"
37 #include "oops/arrayOop.hpp"
38 #include "oops/constantPool.inline.hpp"
39 #include "oops/fieldStreams.inline.hpp"
40 #include "oops/flatArrayOop.inline.hpp"
41 #include "oops/inlineKlass.inline.hpp"
42 #include "oops/instanceMirrorKlass.hpp"
43 #include "oops/klass.inline.hpp"
44 #include "oops/objArrayKlass.hpp"
45 #include "oops/objArrayOop.inline.hpp"
46 #include "oops/oop.inline.hpp"
47 #include "oops/oopCast.inline.hpp"
48 #include "oops/typeArrayOop.inline.hpp"
49 #include "oops/valuePayload.inline.hpp"
50 #include "prims/jvmtiEventController.inline.hpp"
51 #include "prims/jvmtiExport.hpp"
52 #include "prims/jvmtiImpl.hpp"
53 #include "prims/jvmtiTagMap.hpp"
54 #include "prims/jvmtiTagMapTable.hpp"
55 #include "prims/jvmtiThreadState.hpp"
56 #include "runtime/continuationWrapper.inline.hpp"
57 #include "runtime/deoptimization.hpp"
58 #include "runtime/frame.inline.hpp"
59 #include "runtime/handles.inline.hpp"
60 #include "runtime/interfaceSupport.inline.hpp"
61 #include "runtime/javaCalls.hpp"
62 #include "runtime/javaThread.inline.hpp"
63 #include "runtime/jniHandles.inline.hpp"
64 #include "runtime/mountUnmountDisabler.hpp"
65 #include "runtime/mutex.hpp"
66 #include "runtime/mutexLocker.hpp"
67 #include "runtime/safepoint.hpp"
68 #include "runtime/threadSMR.hpp"
69 #include "runtime/timerTrace.hpp"
70 #include "runtime/vframe.hpp"
71 #include "runtime/vmOperations.hpp"
72 #include "runtime/vmThread.hpp"
73 #include "utilities/macros.hpp"
74 #include "utilities/objectBitSet.inline.hpp"
75
76 typedef ObjectBitSet<mtServiceability> JVMTIBitSet;
77
78
79 // Helper class to store objects to visit.
80 class JvmtiHeapwalkVisitStack {
81 private:
82 enum {
83 initial_visit_stack_size = 4000
84 };
85
86 GrowableArray<JvmtiHeapwalkObject>* _visit_stack;
87 JVMTIBitSet _bitset;
88
89 static GrowableArray<JvmtiHeapwalkObject>* create_visit_stack() {
90 return new (mtServiceability) GrowableArray<JvmtiHeapwalkObject>(initial_visit_stack_size, mtServiceability);
91 }
92
93 public:
94 JvmtiHeapwalkVisitStack(): _visit_stack(create_visit_stack()) {
95 }
96 ~JvmtiHeapwalkVisitStack() {
97 if (_visit_stack != nullptr) {
98 delete _visit_stack;
99 }
100 }
101
102 bool is_empty() const {
103 return _visit_stack->is_empty();
104 }
105
106 void push(const JvmtiHeapwalkObject& obj) {
107 _visit_stack->push(obj);
108 }
109
110 // If the object hasn't been visited then push it onto the visit stack
111 // so that it will be visited later.
112 void check_for_visit(const JvmtiHeapwalkObject& obj) {
113 if (!is_visited(obj)) {
114 _visit_stack->push(obj);
115 }
116 }
117
118 JvmtiHeapwalkObject pop() {
119 return _visit_stack->pop();
120 }
121
122 bool is_visited(const JvmtiHeapwalkObject& obj) {
123 // The method is called only for objects from visit_stack to ensure an object is not visited twice.
    // Flat objects can be added to visit_stack only when we visit their holder object, so we cannot get duplicate references to them.
125 if (obj.is_flat()) {
126 return false;
127 }
128 return _bitset.is_marked(obj.obj());
129 }
130
131 void mark_visited(const JvmtiHeapwalkObject& obj) {
132 if (!obj.is_flat()) {
133 _bitset.mark_obj(obj.obj());
134 }
135 }
136 };
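
// Illustrative use of the visit stack (a sketch, not invoked from here; "root" is a placeholder
// for any JvmtiHeapwalkObject): a heap walk seeds the stack with root objects and then drains it,
// marking each object before reporting it and queueing the references it holds:
//
//   JvmtiHeapwalkVisitStack stack;
//   stack.push(root);
//   while (!stack.is_empty()) {
//     JvmtiHeapwalkObject obj = stack.pop();
//     if (stack.is_visited(obj)) continue;
//     stack.mark_visited(obj);
//     // ... report obj, then stack.check_for_visit(ref) for each reference it holds ...
//   }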
137
138 bool JvmtiTagMap::_has_object_free_events = false;
139
140 // create a JvmtiTagMap
141 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
142 _env(env),
143 _lock(Mutex::nosafepoint, "JvmtiTagMap_lock"),
144 _needs_cleaning(false),
145 _posting_events(false),
146 _converting_flat_object(false) {
147
148 assert(JvmtiThreadState_lock->is_locked(), "sanity check");
149 assert(((JvmtiEnvBase *)env)->tag_map() == nullptr, "tag map already exists for environment");
150
151 _hashmap = new JvmtiTagMapTable();
152 _flat_hashmap = new JvmtiFlatTagMapTable();
153
154 // finally add us to the environment
155 ((JvmtiEnvBase *)env)->release_set_tag_map(this);
156 }
157
158 // destroy a JvmtiTagMap
159 JvmtiTagMap::~JvmtiTagMap() {
160
161 // no lock acquired as we assume the enclosing environment is
162 // also being destroyed.
163 ((JvmtiEnvBase *)_env)->set_tag_map(nullptr);
164
165 // finally destroy the hashmap
166 delete _hashmap;
167 _hashmap = nullptr;
168 delete _flat_hashmap;
169 }
170
171 // Called by env_dispose() to reclaim memory before deallocation.
172 // Remove all the entries but keep the empty table intact.
173 // This needs the table lock.
174 void JvmtiTagMap::clear() {
175 MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
176 _hashmap->clear();
177 _flat_hashmap->clear();
178 }
179
// returns the tag map for the given environment. If the tag map
181 // doesn't exist then it is created.
182 JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
183 JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map_acquire();
184 if (tag_map == nullptr) {
185 MutexLocker mu(JvmtiThreadState_lock);
186 tag_map = ((JvmtiEnvBase*)env)->tag_map();
187 if (tag_map == nullptr) {
188 tag_map = new JvmtiTagMap(env);
189 }
190 } else {
191 DEBUG_ONLY(JavaThread::current()->check_possible_safepoint());
192 }
193 return tag_map;
194 }
195
196 // returns true if the hashmaps are empty
197 bool JvmtiTagMap::is_empty() const {
198 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
199 return _hashmap->is_empty() && _flat_hashmap->is_empty();
200 }
201
202 // This checks for posting and is called from the heap walks.
203 void JvmtiTagMap::check_hashmaps_for_heapwalk(GrowableArray<jlong>* objects) {
204 assert(SafepointSynchronize::is_at_safepoint(), "called from safepoints");
205
206 // Verify that the tag map tables are valid and unconditionally post events
207 // that are expected to be posted before gc_notification.
208 JvmtiEnvIterator it;
209 for (JvmtiEnv* env = it.first(); env != nullptr; env = it.next(env)) {
210 JvmtiTagMap* tag_map = env->tag_map_acquire();
211 if (tag_map != nullptr) {
212 // The ZDriver may be walking the hashmaps concurrently so this lock is needed.
213 MutexLocker ml(tag_map->lock(), Mutex::_no_safepoint_check_flag);
214 tag_map->remove_dead_entries_locked(objects);
215 }
216 }
217 }
218
// Converts entries from JvmtiFlatTagMapTable to JvmtiTagMapTable in batches.
// 1. (JvmtiTagMap is locked)
//    reads entries from JvmtiFlatTagMapTable (which describe flat value objects);
// 2. (JvmtiTagMap is unlocked)
//    creates heap-allocated copies of the flat objects;
// 3. (JvmtiTagMap is locked)
//    ensures each source entry still exists, removes it from JvmtiFlatTagMapTable, and adds a new entry to JvmtiTagMapTable.
// If an error occurs in step 2 (e.g. OOM), the process stops.
227 class JvmtiTagMapFlatEntryConverter: public StackObj {
228 private:
229 struct Entry {
230 // source flat value object
231 Handle holder;
232 int offset;
233 InlineKlass* inline_klass;
234 LayoutKind layout_kind;
235 // converted heap-allocated object
236 Handle dst;
237
    Entry(): holder(), offset(0), inline_klass(nullptr), layout_kind(LayoutKind::REFERENCE), dst() {}
239 Entry(Handle holder, int offset, InlineKlass* inline_klass, LayoutKind lk)
240 : holder(holder), offset(offset), inline_klass(inline_klass), layout_kind(lk), dst() {}
241 };
242
243 int _batch_size;
244 GrowableArray<Entry> _entries;
245 bool _has_error;
246
247 public:
248 JvmtiTagMapFlatEntryConverter(int batch_size): _batch_size(batch_size), _entries(batch_size, mtServiceability), _has_error(false) { }
249 ~JvmtiTagMapFlatEntryConverter() {}
250
251 // returns false if there is nothing to convert
252 bool import_entries(JvmtiFlatTagMapTable* table) {
253 if (_has_error) {
254 // stop the process to avoid infinite loop
255 return false;
256 }
257
258 class Importer: public JvmtiFlatTagMapKeyClosure {
259 private:
260 GrowableArray<Entry>& _entries;
261 int _batch_size;
262 public:
263 Importer(GrowableArray<Entry>& entries, int batch_size): _entries(entries), _batch_size(batch_size) {}
264
265 bool do_entry(JvmtiFlatTagMapKey& key, jlong& tag) {
266 Entry entry(Handle(Thread::current(), key.holder()), key.offset(), key.inline_klass(), key.layout_kind());
267 _entries.append(entry);
268
269 return _entries.length() < _batch_size;
270 }
271 } importer(_entries, _batch_size);
272 table->entry_iterate(&importer);
273
274 return !_entries.is_empty();
275 }
276
277 void convert() {
278 for (int i = 0; i < _entries.length(); i++) {
279 EXCEPTION_MARK;
280 Entry& entry = _entries.at(i);
281 FlatValuePayload payload = FlatValuePayload::construct_from_parts(
282 entry.holder(), entry.offset, entry.inline_klass, entry.layout_kind);
283 oop obj = payload.read(JavaThread::current());
284
285 if (HAS_PENDING_EXCEPTION) {
286 tty->print_cr("Exception in JvmtiTagMapFlatEntryConverter: ");
287 java_lang_Throwable::print(PENDING_EXCEPTION, tty);
288 tty->cr();
289 CLEAR_PENDING_EXCEPTION;
290 // stop the conversion
291 _has_error = true;
292 } else {
293 entry.dst = Handle(Thread::current(), obj);
294 }
295 }
296 }
297
298 // returns number of converted entries
299 int move(JvmtiFlatTagMapTable* src_table, JvmtiTagMapTable* dst_table) {
300 int count = 0;
301 for (int i = 0; i < _entries.length(); i++) {
302 Entry& entry = _entries.at(i);
303 if (entry.dst() == nullptr) {
304 // some error during conversion, skip the entry
305 continue;
306 }
307 JvmtiHeapwalkObject obj(entry.holder(), entry.offset, entry.inline_klass, entry.layout_kind);
308 jlong tag = src_table->remove(obj);
309
310 if (tag != 0) { // ensure the entry is still in the src_table
311 dst_table->add(entry.dst(), tag);
312 count++;
      }
    }
    // clear the imported entries
318 _entries.clear();
319 return count;
320 }
321 };
322
323 void JvmtiTagMap::convert_flat_object_entries() {
324 Thread* current = Thread::current();
325 assert(current->is_Java_thread(), "must be executed on JavaThread");
326
327 log_debug(jvmti, table)("convert_flat_object_entries, main table size = %d, flat table size = %d",
328 _hashmap->number_of_entries(), _flat_hashmap->number_of_entries());
329
330 {
331 MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
332 // If another thread is converting, let it finish.
333 while (_converting_flat_object) {
334 ml.wait();
335 }
336 if (_flat_hashmap->is_empty()) {
337 // nothing to convert
338 return;
339 }
340 _converting_flat_object = true;
341 }
342
343 const int BATCH_SIZE = 1024;
344 JvmtiTagMapFlatEntryConverter converter(BATCH_SIZE);
345
346 int count = 0;
347 while (true) {
348 HandleMark hm(current);
349 {
350 MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
351 if (!converter.import_entries(_flat_hashmap)) {
352 break;
353 }
354 }
355 // Convert flat objects to heap-allocated without table lock (so agent callbacks can get/set tags).
356 converter.convert();
357 {
358 MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
359 count += converter.move(_flat_hashmap, _hashmap);
360 }
361 }
362
  log_info(jvmti, table)("%d flat value objects converted, flat table size = %d",
364 count, _flat_hashmap->number_of_entries());
365 {
366 MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
367 _converting_flat_object = false;
368 ml.notify_all();
369 }
370 }
371
372 jlong JvmtiTagMap::find(const JvmtiHeapwalkObject& obj) const {
373 jlong tag = _hashmap->find(obj);
374 if (tag == 0 && obj.is_value()) {
375 tag = _flat_hashmap->find(obj);
376 }
377 return tag;
378 }
379
380 void JvmtiTagMap::add(const JvmtiHeapwalkObject& obj, jlong tag) {
381 if (obj.is_flat()) {
    // we may have a tag for an equal (non-flat) object in _hashmap, so try to update it first
383 if (!_hashmap->update(obj, tag)) {
384 // no entry in _hashmap, add to _flat_hashmap
385 _flat_hashmap->add(obj, tag);
386 }
387 } else {
388 _hashmap->add(obj, tag);
389 }
390 }
391
392 void JvmtiTagMap::remove(const JvmtiHeapwalkObject& obj) {
393 if (!_hashmap->remove(obj)) {
394 if (obj.is_value()) {
395 _flat_hashmap->remove(obj);
396 }
397 }
398 }
399
400 // A CallbackWrapper is a support class for querying and tagging an object
401 // around a callback to a profiler. The constructor does pre-callback
402 // work to get the tag value, klass tag value, ... and the destructor
403 // does the post-callback work of tagging or untagging the object.
404 //
405 // {
406 // CallbackWrapper wrapper(tag_map, o);
407 //
408 // (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
409 //
410 // }
411 // wrapper goes out of scope here which results in the destructor
412 // checking to see if the object has been tagged, untagged, or the
413 // tag value has changed.
414 //
415 class CallbackWrapper : public StackObj {
416 private:
417 JvmtiTagMap* _tag_map;
418 const JvmtiHeapwalkObject& _o;
419 jlong _obj_size;
420 jlong _obj_tag;
421 jlong _klass_tag;
422
423 protected:
424 JvmtiTagMap* tag_map() const { return _tag_map; }
425
426 // invoked post-callback to tag, untag, or update the tag of an object
427 void inline post_callback_tag_update(const JvmtiHeapwalkObject& o, JvmtiTagMap* tag_map, jlong obj_tag);
428
429 public:
430 CallbackWrapper(JvmtiTagMap* tag_map, const JvmtiHeapwalkObject& o)
431 : _tag_map(tag_map), _o(o)
432 {
433 assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
434 "MT unsafe or must be VM thread");
435
436 // object size
437 if (!o.is_flat()) {
438 // common case: we have oop
439 _obj_size = (jlong)o.obj()->size() * wordSize;
440 } else {
      // flat value object, we know its InlineKlass
      assert(_o.inline_klass() != nullptr, "must be");
      _obj_size = (jlong)_o.inline_klass()->size() * wordSize;
444 }
445
446 // get object tag
447 _obj_tag = _tag_map->find(_o);
448
449 // get the class and the class's tag value
450 assert(vmClasses::Class_klass()->is_mirror_instance_klass(), "Is not?");
451
452 _klass_tag = _tag_map->find(_o.klass()->java_mirror());
453 }
454
455 ~CallbackWrapper() {
456 post_callback_tag_update(_o, _tag_map, _obj_tag);
457 }
458
459 inline jlong* obj_tag_p() { return &_obj_tag; }
460 inline jlong obj_size() const { return _obj_size; }
461 inline jlong obj_tag() const { return _obj_tag; }
462 inline jlong klass_tag() const { return _klass_tag; }
463 };
464
// called post-callback to tag, untag, or update the tag of an object
466 void inline CallbackWrapper::post_callback_tag_update(const JvmtiHeapwalkObject& o,
467 JvmtiTagMap* tag_map,
468 jlong obj_tag) {
469 if (obj_tag == 0) {
470 // callback has untagged the object, remove the entry if present
471 tag_map->remove(o);
472 } else {
473 // object was previously tagged or not present - the callback may have
474 // changed the tag value
475 assert(Thread::current()->is_VM_thread(), "must be VMThread");
476 tag_map->add(o, obj_tag);
477 }
478 }
479
480 // An extended CallbackWrapper used when reporting an object reference
481 // to the agent.
482 //
483 // {
484 // TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
485 //
486 // (*callback)(wrapper.klass_tag(),
487 // wrapper.obj_size(),
//                  wrapper.obj_tag_p(),
489 // wrapper.referrer_tag_p(), ...)
490 //
491 // }
492 // wrapper goes out of scope here which results in the destructor
493 // checking to see if the referrer object has been tagged, untagged,
494 // or the tag value has changed.
495 //
496 class TwoOopCallbackWrapper : public CallbackWrapper {
497 private:
498 const JvmtiHeapwalkObject& _referrer;
499 bool _is_reference_to_self;
500 jlong _referrer_obj_tag;
501 jlong _referrer_klass_tag;
502 jlong* _referrer_tag_p;
503
504 bool is_reference_to_self() const { return _is_reference_to_self; }
505
506 public:
507 TwoOopCallbackWrapper(JvmtiTagMap* tag_map, const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& o) :
508 CallbackWrapper(tag_map, o), _referrer(referrer)
509 {
510 // self reference needs to be handled in a special way
511 _is_reference_to_self = (referrer == o);
512
513 if (_is_reference_to_self) {
514 _referrer_klass_tag = klass_tag();
515 _referrer_tag_p = obj_tag_p();
516 } else {
517 // get object tag
518 _referrer_obj_tag = tag_map->find(_referrer);
519
520 _referrer_tag_p = &_referrer_obj_tag;
521
522 // get referrer class tag.
523 _referrer_klass_tag = tag_map->find(_referrer.klass()->java_mirror());
524 }
525 }
526
527 ~TwoOopCallbackWrapper() {
528 if (!is_reference_to_self()) {
529 post_callback_tag_update(_referrer,
530 tag_map(),
531 _referrer_obj_tag);
532 }
533 }
534
535 // address of referrer tag
536 // (for a self reference this will return the same thing as obj_tag_p())
537 inline jlong* referrer_tag_p() { return _referrer_tag_p; }
538
539 // referrer's class tag
540 inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
541 };
542
543 // tag an object
544 //
545 // This function is performance critical. If many threads attempt to tag objects
546 // around the same time then it's possible that the Mutex associated with the
547 // tag map will be a hot lock.
548 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
549 MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
550
551 // resolve the object
552 oop o = JNIHandles::resolve_non_null(object);
553
554 // see if the object is already tagged
555 JvmtiHeapwalkObject obj(o);
556 if (tag == 0) {
557 // remove the entry if present
558 _hashmap->remove(obj);
559 } else {
560 // if the object is already tagged or not present then we add/update
561 // the tag
562 add(obj, tag);
563 }
564 }
565
566 // get the tag for an object
567 jlong JvmtiTagMap::get_tag(jobject object) {
568 MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
569
570 // resolve the object
571 oop o = JNIHandles::resolve_non_null(object);
572
573 return find(o);
574 }
575
576
577 // Helper class used to describe the static or instance fields of a class.
578 // For each field it holds the field index (as defined by the JVMTI specification),
579 // the field type, and the offset.
580
581 class ClassFieldDescriptor: public CHeapObj<mtInternal> {
582 private:
583 int _field_index;
584 int _field_offset;
585 char _field_type;
586 InlineKlass* _inline_klass; // nullptr for heap object
587 LayoutKind _layout_kind;
588 public:
589 ClassFieldDescriptor(int index, const FieldStreamBase& fld) :
590 _field_index(index), _field_offset(fld.offset()), _field_type(fld.signature()->char_at(0)) {
591 if (fld.is_flat()) {
592 const fieldDescriptor& fd = fld.field_descriptor();
593 InstanceKlass* holder_klass = fd.field_holder();
594 InlineLayoutInfo* layout_info = holder_klass->inline_layout_info_adr(fd.index());
595 _inline_klass = layout_info->klass();
596 _layout_kind = layout_info->kind();
597 } else {
598 _inline_klass = nullptr;
599 _layout_kind = LayoutKind::REFERENCE;
600 }
601 }
602 int field_index() const { return _field_index; }
603 char field_type() const { return _field_type; }
604 int field_offset() const { return _field_offset; }
605 bool is_flat() const { return _inline_klass != nullptr; }
606 InlineKlass* inline_klass() const { return _inline_klass; }
607 LayoutKind layout_kind() const { return _layout_kind; }
608 };
609
610 class ClassFieldMap: public CHeapObj<mtInternal> {
611 private:
612 enum {
613 initial_field_count = 5
614 };
615
616 // list of field descriptors
617 GrowableArray<ClassFieldDescriptor*>* _fields;
618
619 // constructor
620 ClassFieldMap();
621
622 // calculates number of fields in all interfaces
623 static int interfaces_field_count(InstanceKlass* ik);
624
625 // add a field
626 void add(int index, const FieldStreamBase& fld);
627
628 public:
629 ~ClassFieldMap();
630
631 // access
632 int field_count() { return _fields->length(); }
633 ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }
634
635 // functions to create maps of static or instance fields
636 static ClassFieldMap* create_map_of_static_fields(Klass* k);
637 static ClassFieldMap* create_map_of_instance_fields(Klass* k);
638 };
639
640 ClassFieldMap::ClassFieldMap() {
641 _fields = new (mtServiceability)
642 GrowableArray<ClassFieldDescriptor*>(initial_field_count, mtServiceability);
643 }
644
645 ClassFieldMap::~ClassFieldMap() {
646 for (int i=0; i<_fields->length(); i++) {
647 delete _fields->at(i);
648 }
649 delete _fields;
650 }
651
652 int ClassFieldMap::interfaces_field_count(InstanceKlass* ik) {
653 const Array<InstanceKlass*>* interfaces = ik->transitive_interfaces();
654 int count = 0;
655 for (int i = 0; i < interfaces->length(); i++) {
    count += interfaces->at(i)->java_fields_count();
  }
659 return count;
660 }
661
662 void ClassFieldMap::add(int index, const FieldStreamBase& fld) {
663 ClassFieldDescriptor* field = new ClassFieldDescriptor(index, fld);
664 _fields->append(field);
665 }
666
667 // Returns a heap allocated ClassFieldMap to describe the static fields
668 // of the given class.
669 ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
670 InstanceKlass* ik = InstanceKlass::cast(k);
671
672 // create the field map
673 ClassFieldMap* field_map = new ClassFieldMap();
674
675 // Static fields of interfaces and superclasses are reported as references from the interfaces/superclasses.
  // Need to calculate the start index of this class's fields: the number of fields in all interfaces and superclasses.
677 int index = interfaces_field_count(ik);
678 for (InstanceKlass* super_klass = ik->super(); super_klass != nullptr; super_klass = super_klass->super()) {
679 index += super_klass->java_fields_count();
680 }
681
682 for (JavaFieldStream fld(ik); !fld.done(); fld.next(), index++) {
683 // ignore instance fields
684 if (!fld.access_flags().is_static()) {
685 continue;
686 }
687 field_map->add(index, fld);
688 }
689
690 return field_map;
691 }
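
// Illustrative example (hypothetical classes): if a class C's transitive interfaces and superclasses
// together declare 5 fields (static or instance), then the fields declared by C itself are numbered
// from index 5 onward in declaration order, and only the static ones among them are added to the map.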
692
693 // Returns a heap allocated ClassFieldMap to describe the instance fields
694 // of the given class. All instance fields are included (this means public
695 // and private fields declared in superclasses too).
696 ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(Klass* k) {
697 InstanceKlass* ik = InstanceKlass::cast(k);
698
699 // create the field map
700 ClassFieldMap* field_map = new ClassFieldMap();
701
  // Fields of the superclasses are reported first, so we need to know the total field count to calculate field indices.
703 int total_field_number = interfaces_field_count(ik);
704 for (InstanceKlass* klass = ik; klass != nullptr; klass = klass->super()) {
705 total_field_number += klass->java_fields_count();
706 }
707
708 for (InstanceKlass* klass = ik; klass != nullptr; klass = klass->super()) {
709 JavaFieldStream fld(klass);
710 int start_index = total_field_number - klass->java_fields_count();
711 for (int index = 0; !fld.done(); fld.next(), index++) {
712 // ignore static fields
713 if (fld.access_flags().is_static()) {
714 continue;
715 }
716 field_map->add(start_index + index, fld);
717 }
718 // update total_field_number for superclass (decrease by the field count in the current class)
719 total_field_number = start_index;
720 }
721
722 return field_map;
723 }
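
// Illustrative example (hypothetical classes): with one interface field, class A declaring instance
// fields a0, a1 and class B extends A declaring b0, b1, b2, the map for B contains a0->1, a1->2,
// b0->3, b1->4, b2->5; index 0 is taken by the interface's field, which is static and not included here.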
724
// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
// heap iteration to avoid creating a field map for each object in the heap
728 // (only need to create the map when the first instance of a class is encountered).
729 //
730 class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
731 private:
732 enum {
733 initial_class_count = 200
734 };
735 ClassFieldMap* _field_map;
736
737 ClassFieldMap* field_map() const { return _field_map; }
738
739 JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
740 ~JvmtiCachedClassFieldMap();
741
742 static GrowableArray<InstanceKlass*>* _class_list;
743 static void add_to_class_list(InstanceKlass* ik);
744
745 public:
746 // returns the field map for a given klass (returning map cached
  // by InstanceKlass if possible)
748 static ClassFieldMap* get_map_of_instance_fields(Klass* k);
749
750 // removes the field map from all instanceKlasses - should be
751 // called before VM operation completes
752 static void clear_cache();
753
  // returns the number of ClassFieldMaps cached by instanceKlasses
755 static int cached_field_map_count();
756 };
757
758 GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
759
760 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
761 _field_map = field_map;
762 }
763
764 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
765 if (_field_map != nullptr) {
766 delete _field_map;
767 }
768 }
769
// Marker class to ensure that the class field map cache is only used in a defined
771 // scope.
772 class ClassFieldMapCacheMark : public StackObj {
773 private:
774 static bool _is_active;
775 public:
776 ClassFieldMapCacheMark() {
777 assert(Thread::current()->is_VM_thread(), "must be VMThread");
778 assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
779 assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
780 _is_active = true;
781 }
782 ~ClassFieldMapCacheMark() {
783 JvmtiCachedClassFieldMap::clear_cache();
784 _is_active = false;
785 }
786 static bool is_active() { return _is_active; }
787 };
788
789 bool ClassFieldMapCacheMark::_is_active;
790
791 // record that the given InstanceKlass is caching a field map
792 void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
793 if (_class_list == nullptr) {
794 _class_list = new (mtServiceability)
795 GrowableArray<InstanceKlass*>(initial_class_count, mtServiceability);
796 }
797 _class_list->push(ik);
798 }
799
800 // returns the instance field map for the given klass
801 // (returns field map cached by the InstanceKlass if possible)
802 ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(Klass *k) {
803 assert(Thread::current()->is_VM_thread(), "must be VMThread");
804 assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
805
806 InstanceKlass* ik = InstanceKlass::cast(k);
807
808 // return cached map if possible
809 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
810 if (cached_map != nullptr) {
811 assert(cached_map->field_map() != nullptr, "missing field list");
812 return cached_map->field_map();
813 } else {
814 ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(k);
815 cached_map = new JvmtiCachedClassFieldMap(field_map);
816 ik->set_jvmti_cached_class_field_map(cached_map);
817 add_to_class_list(ik);
818 return field_map;
819 }
820 }
821
// remove the field maps cached by all instanceKlasses
823 void JvmtiCachedClassFieldMap::clear_cache() {
824 assert(Thread::current()->is_VM_thread(), "must be VMThread");
825 if (_class_list != nullptr) {
826 for (int i = 0; i < _class_list->length(); i++) {
827 InstanceKlass* ik = _class_list->at(i);
828 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
829 assert(cached_map != nullptr, "should not be null");
830 ik->set_jvmti_cached_class_field_map(nullptr);
831 delete cached_map; // deletes the encapsulated field map
832 }
833 delete _class_list;
834 _class_list = nullptr;
835 }
836 }
837
// returns the number of ClassFieldMaps cached by instanceKlasses
839 int JvmtiCachedClassFieldMap::cached_field_map_count() {
840 return (_class_list == nullptr) ? 0 : _class_list->length();
841 }
842
843 // helper function to indicate if an object is filtered by its tag or class tag
844 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
845 jlong klass_tag,
846 int heap_filter) {
847 // apply the heap filter
848 if (obj_tag != 0) {
849 // filter out tagged objects
850 if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
851 } else {
852 // filter out untagged objects
853 if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
854 }
855 if (klass_tag != 0) {
856 // filter out objects with tagged classes
857 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
858 } else {
859 // filter out objects with untagged classes.
860 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
861 }
862 return false;
863 }
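
// For example (illustrative): with heap_filter == (JVMTI_HEAP_FILTER_TAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED)
// only untagged objects whose class is tagged pass the filter; everything else is skipped.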
864
865 // helper function to indicate if an object is filtered by a klass filter
866 static inline bool is_filtered_by_klass_filter(const JvmtiHeapwalkObject& obj, Klass* klass_filter) {
867 if (klass_filter != nullptr) {
868 if (obj.klass() != klass_filter) {
869 return true;
870 }
871 }
872 return false;
873 }
874
875 // helper function to tell if a field is a primitive field or not
876 static inline bool is_primitive_field_type(char type) {
877 return (type != JVM_SIGNATURE_CLASS && type != JVM_SIGNATURE_ARRAY);
878 }
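
// For example, 'I' (int) and 'J' (long) are primitive field types, while 'L' (JVM_SIGNATURE_CLASS)
// and '[' (JVM_SIGNATURE_ARRAY) denote reference types and are not.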
879
880 // helper function to copy the value from location addr to jvalue.
881 static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
882 switch (value_type) {
883 case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
884 case JVMTI_PRIMITIVE_TYPE_BYTE : { v->b = *(jbyte*)addr; break; }
885 case JVMTI_PRIMITIVE_TYPE_CHAR : { v->c = *(jchar*)addr; break; }
886 case JVMTI_PRIMITIVE_TYPE_SHORT : { v->s = *(jshort*)addr; break; }
887 case JVMTI_PRIMITIVE_TYPE_INT : { v->i = *(jint*)addr; break; }
888 case JVMTI_PRIMITIVE_TYPE_LONG : { v->j = *(jlong*)addr; break; }
889 case JVMTI_PRIMITIVE_TYPE_FLOAT : { v->f = *(jfloat*)addr; break; }
890 case JVMTI_PRIMITIVE_TYPE_DOUBLE : { v->d = *(jdouble*)addr; break; }
891 default: ShouldNotReachHere();
892 }
893 }
894
895 // helper function to invoke string primitive value callback
896 // returns visit control flags
897 static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
898 CallbackWrapper* wrapper,
899 const JvmtiHeapwalkObject& obj,
900 void* user_data)
901 {
902 assert(!obj.is_flat(), "cannot be flat");
903 oop str = obj.obj();
904 assert(str->klass() == vmClasses::String_klass(), "not a string");
905
906 typeArrayOop s_value = java_lang_String::value(str);
907
908 // JDK-6584008: the value field may be null if a String instance is
909 // partially constructed.
910 if (s_value == nullptr) {
911 return 0;
912 }
913 // get the string value and length
914 // (string value may be offset from the base)
915 int s_len = java_lang_String::length(str);
916 bool is_latin1 = java_lang_String::is_latin1(str);
917 jchar* value;
918 if (s_len > 0) {
919 if (!is_latin1) {
920 value = s_value->char_at_addr(0);
921 } else {
922 // Inflate latin1 encoded string to UTF16
923 jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
924 for (int i = 0; i < s_len; i++) {
925 buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
926 }
927 value = &buf[0];
928 }
929 } else {
930 // Don't use char_at_addr(0) if length is 0
931 value = (jchar*) s_value->base(T_CHAR);
932 }
933
934 // invoke the callback
935 jint res = (*cb)(wrapper->klass_tag(),
936 wrapper->obj_size(),
937 wrapper->obj_tag_p(),
938 value,
939 (jint)s_len,
940 user_data);
941
942 if (is_latin1 && s_len > 0) {
943 FREE_C_HEAP_ARRAY(value);
944 }
945 return res;
946 }
947
// helper function to invoke array primitive value callback
949 // returns visit control flags
950 static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
951 CallbackWrapper* wrapper,
952 const JvmtiHeapwalkObject& obj,
953 void* user_data)
954 {
955 assert(!obj.is_flat(), "cannot be flat");
956 assert(obj.obj()->is_typeArray(), "not a primitive array");
957
958 // get base address of first element
959 typeArrayOop array = typeArrayOop(obj.obj());
960 BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
961 void* elements = array->base(type);
962
963 // jvmtiPrimitiveType is defined so this mapping is always correct
964 jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);
965
966 return (*cb)(wrapper->klass_tag(),
967 wrapper->obj_size(),
968 wrapper->obj_tag_p(),
969 (jint)array->length(),
970 elem_type,
971 elements,
972 user_data);
973 }
974
975 // helper function to invoke the primitive field callback for all static fields
976 // of a given class
977 static jint invoke_primitive_field_callback_for_static_fields
978 (CallbackWrapper* wrapper,
979 oop obj,
980 jvmtiPrimitiveFieldCallback cb,
981 void* user_data)
982 {
983 // for static fields only the index will be set
984 static jvmtiHeapReferenceInfo reference_info = { 0 };
985
986 assert(obj->klass() == vmClasses::Class_klass(), "not a class");
987 if (java_lang_Class::is_primitive(obj)) {
988 return 0;
989 }
990 Klass* klass = java_lang_Class::as_Klass(obj);
991
992 // ignore classes for object and type arrays
993 if (!klass->is_instance_klass()) {
994 return 0;
995 }
996
997 // ignore classes which aren't linked yet
998 InstanceKlass* ik = InstanceKlass::cast(klass);
999 if (!ik->is_linked()) {
1000 return 0;
1001 }
1002
1003 // get the field map
1004 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
1005
1006 // invoke the callback for each static primitive field
1007 for (int i=0; i<field_map->field_count(); i++) {
1008 ClassFieldDescriptor* field = field_map->field_at(i);
1009
1010 // ignore non-primitive fields
1011 char type = field->field_type();
1012 if (!is_primitive_field_type(type)) {
1013 continue;
1014 }
1015 // one-to-one mapping
1016 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1017
1018 // get offset and field value
1019 int offset = field->field_offset();
1020 address addr = cast_from_oop<address>(klass->java_mirror()) + offset;
1021 jvalue value;
1022 copy_to_jvalue(&value, addr, value_type);
1023
1024 // field index
1025 reference_info.field.index = field->field_index();
1026
1027 // invoke the callback
1028 jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
1029 &reference_info,
1030 wrapper->klass_tag(),
1031 wrapper->obj_tag_p(),
1032 value,
1033 value_type,
1034 user_data);
1035 if (res & JVMTI_VISIT_ABORT) {
1036 delete field_map;
1037 return res;
1038 }
1039 }
1040
1041 delete field_map;
1042 return 0;
1043 }
1044
1045 // helper function to invoke the primitive field callback for all instance fields
1046 // of a given object
1047 static jint invoke_primitive_field_callback_for_instance_fields(
1048 CallbackWrapper* wrapper,
1049 const JvmtiHeapwalkObject& obj,
1050 jvmtiPrimitiveFieldCallback cb,
1051 void* user_data)
1052 {
1053 // for instance fields only the index will be set
1054 static jvmtiHeapReferenceInfo reference_info = { 0 };
1055
1056 // get the map of the instance fields
1057 ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj.klass());
1058
1059 // invoke the callback for each instance primitive field
1060 for (int i=0; i<fields->field_count(); i++) {
1061 ClassFieldDescriptor* field = fields->field_at(i);
1062
1063 // ignore non-primitive fields
1064 char type = field->field_type();
1065 if (!is_primitive_field_type(type)) {
1066 continue;
1067 }
1068 // one-to-one mapping
1069 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1070
1071 // get field value
1072 address addr = cast_from_oop<address>(obj.obj()) + obj.offset() + field->field_offset();
1073 jvalue value;
1074 copy_to_jvalue(&value, addr, value_type);
1075
1076 // field index
1077 reference_info.field.index = field->field_index();
1078
1079 // invoke the callback
1080 jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
1081 &reference_info,
1082 wrapper->klass_tag(),
1083 wrapper->obj_tag_p(),
1084 value,
1085 value_type,
1086 user_data);
1087 if (res & JVMTI_VISIT_ABORT) {
1088 return res;
1089 }
1090 }
1091 return 0;
1092 }
1093
1094
1095 // VM operation to iterate over all objects in the heap (both reachable
1096 // and unreachable)
1097 class VM_HeapIterateOperation: public VM_Operation {
1098 private:
1099 ObjectClosure* _blk;
1100 GrowableArray<jlong>* const _dead_objects;
1101 public:
1102 VM_HeapIterateOperation(ObjectClosure* blk, GrowableArray<jlong>* objects) :
1103 _blk(blk), _dead_objects(objects) { }
1104
1105 VMOp_Type type() const { return VMOp_HeapIterateOperation; }
1106 void doit() {
    // allows class field maps to be cached during iteration
1108 ClassFieldMapCacheMark cm;
1109
1110 JvmtiTagMap::check_hashmaps_for_heapwalk(_dead_objects);
1111
1112 // make sure that heap is parsable (fills TLABs with filler objects)
1113 Universe::heap()->ensure_parsability(false); // no need to retire TLABs
1114
1115 // Verify heap before iteration - if the heap gets corrupted then
1116 // JVMTI's IterateOverHeap will crash.
1117 if (VerifyBeforeIteration) {
1118 Universe::verify();
1119 }
1120
1121 // do the iteration
1122 Universe::heap()->object_iterate(_blk);
1123 }
1124 };
1125
1126
1127 // An ObjectClosure used to support the deprecated IterateOverHeap and
1128 // IterateOverInstancesOfClass functions
1129 class IterateOverHeapObjectClosure: public ObjectClosure {
1130 private:
1131 JvmtiTagMap* _tag_map;
1132 Klass* _klass;
1133 jvmtiHeapObjectFilter _object_filter;
1134 jvmtiHeapObjectCallback _heap_object_callback;
1135 const void* _user_data;
1136
1137 // accessors
1138 JvmtiTagMap* tag_map() const { return _tag_map; }
1139 jvmtiHeapObjectFilter object_filter() const { return _object_filter; }
1140 jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
1141 Klass* klass() const { return _klass; }
1142 const void* user_data() const { return _user_data; }
1143
1144 // indicates if iteration has been aborted
1145 bool _iteration_aborted;
1146 bool is_iteration_aborted() const { return _iteration_aborted; }
1147 void set_iteration_aborted(bool aborted) { _iteration_aborted = aborted; }
1148
1149 public:
1150 IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
1151 Klass* klass,
1152 jvmtiHeapObjectFilter object_filter,
1153 jvmtiHeapObjectCallback heap_object_callback,
1154 const void* user_data) :
1155 _tag_map(tag_map),
1156 _klass(klass),
1157 _object_filter(object_filter),
1158 _heap_object_callback(heap_object_callback),
1159 _user_data(user_data),
1160 _iteration_aborted(false)
1161 {
1162 }
1163
1164 void do_object(oop o);
1165 };
1166
1167 // invoked for each object in the heap
1168 void IterateOverHeapObjectClosure::do_object(oop o) {
1169 assert(o != nullptr, "Heap iteration should never produce null!");
1170 // check if iteration has been halted
1171 if (is_iteration_aborted()) return;
1172
1173 // instanceof check when filtering by klass
1174 if (klass() != nullptr && !o->is_a(klass())) {
1175 return;
1176 }
1177
1178 // skip if object is a dormant shared object whose mirror hasn't been loaded
1179 if (o->klass()->java_mirror() == nullptr) {
1180 log_debug(aot, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(o),
1181 o->klass()->external_name());
1182 return;
1183 }
1184
  // prepare for the callback
1186 JvmtiHeapwalkObject wrapper_obj(o);
1187 CallbackWrapper wrapper(tag_map(), wrapper_obj);
1188
1189 // if the object is tagged and we're only interested in untagged objects
1190 // then don't invoke the callback. Similarly, if the object is untagged
1191 // and we're only interested in tagged objects we skip the callback.
1192 if (wrapper.obj_tag() != 0) {
1193 if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
1194 } else {
1195 if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
1196 }
1197
1198 // invoke the agent's callback
1199 jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
1200 wrapper.obj_size(),
1201 wrapper.obj_tag_p(),
1202 (void*)user_data());
1203 if (control == JVMTI_ITERATION_ABORT) {
1204 set_iteration_aborted(true);
1205 }
1206 }
1207
1208 // An ObjectClosure used to support the IterateThroughHeap function
1209 class IterateThroughHeapObjectClosure: public ObjectClosure {
1210 private:
1211 JvmtiTagMap* _tag_map;
1212 Klass* _klass;
1213 int _heap_filter;
1214 const jvmtiHeapCallbacks* _callbacks;
1215 const void* _user_data;
1216
1217 // accessor functions
1218 JvmtiTagMap* tag_map() const { return _tag_map; }
1219 int heap_filter() const { return _heap_filter; }
1220 const jvmtiHeapCallbacks* callbacks() const { return _callbacks; }
1221 Klass* klass() const { return _klass; }
1222 const void* user_data() const { return _user_data; }
1223
1224 // indicates if the iteration has been aborted
1225 bool _iteration_aborted;
1226 bool is_iteration_aborted() const { return _iteration_aborted; }
1227
1228 // used to check the visit control flags. If the abort flag is set
1229 // then we set the iteration aborted flag so that the iteration completes
1230 // without processing any further objects
1231 bool check_flags_for_abort(jint flags) {
1232 bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
1233 if (is_abort) {
1234 _iteration_aborted = true;
1235 }
1236 return is_abort;
1237 }
1238
1239 void visit_object(const JvmtiHeapwalkObject& obj);
1240 void visit_flat_fields(const JvmtiHeapwalkObject& obj);
1241 void visit_flat_array_elements(const JvmtiHeapwalkObject& obj);
1242
1243 public:
1244 IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
1245 Klass* klass,
1246 int heap_filter,
1247 const jvmtiHeapCallbacks* heap_callbacks,
1248 const void* user_data) :
1249 _tag_map(tag_map),
1250 _klass(klass),
1251 _heap_filter(heap_filter),
1252 _callbacks(heap_callbacks),
1253 _user_data(user_data),
1254 _iteration_aborted(false)
1255 {
1256 }
1257
1258 void do_object(oop obj);
1259 };
1260
1261 // invoked for each object in the heap
1262 void IterateThroughHeapObjectClosure::do_object(oop obj) {
1263 assert(obj != nullptr, "Heap iteration should never produce null!");
1264 // check if iteration has been halted
1265 if (is_iteration_aborted()) return;
1266
1267 // skip if object is a dormant shared object whose mirror hasn't been loaded
1268 if (obj != nullptr && obj->klass()->java_mirror() == nullptr) {
1269 log_debug(aot, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(obj),
1270 obj->klass()->external_name());
1271 return;
1272 }
1273
1274 visit_object(obj);
1275 }
1276
1277 void IterateThroughHeapObjectClosure::visit_object(const JvmtiHeapwalkObject& obj) {
1278 // apply class filter
1279 if (is_filtered_by_klass_filter(obj, klass())) return;
1280
1281 // prepare for callback
1282 CallbackWrapper wrapper(tag_map(), obj);
1283
1284 // check if filtered by the heap filter
1285 if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
1286 return;
1287 }
1288
1289 // for arrays we need the length, otherwise -1
1290 bool is_array = obj.klass()->is_array_klass();
1291 int len = is_array ? arrayOop(obj.obj())->length() : -1;
1292
1293 // invoke the object callback (if callback is provided)
1294 if (callbacks()->heap_iteration_callback != nullptr) {
1295 jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
1296 jint res = (*cb)(wrapper.klass_tag(),
1297 wrapper.obj_size(),
1298 wrapper.obj_tag_p(),
1299 (jint)len,
1300 (void*)user_data());
1301 if (check_flags_for_abort(res)) return;
1302 }
1303
1304 // for objects and classes we report primitive fields if callback provided
1305 if (callbacks()->primitive_field_callback != nullptr && obj.klass()->is_instance_klass()) {
1306 jint res;
1307 jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
1308 if (obj.klass() == vmClasses::Class_klass()) {
1309 assert(!obj.is_flat(), "Class object cannot be flattened");
1310 res = invoke_primitive_field_callback_for_static_fields(&wrapper,
1311 obj.obj(),
1312 cb,
1313 (void*)user_data());
1314 } else {
1315 res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
1316 obj,
1317 cb,
1318 (void*)user_data());
1319 }
1320 if (check_flags_for_abort(res)) return;
1321 }
1322
1323 // string callback
1324 if (!is_array &&
1325 callbacks()->string_primitive_value_callback != nullptr &&
1326 obj.klass() == vmClasses::String_klass()) {
1327 jint res = invoke_string_value_callback(
1328 callbacks()->string_primitive_value_callback,
1329 &wrapper,
1330 obj,
1331 (void*)user_data());
1332 if (check_flags_for_abort(res)) return;
1333 }
1334
1335 // array callback
1336 if (is_array &&
1337 callbacks()->array_primitive_value_callback != nullptr &&
1338 obj.klass()->is_typeArray_klass()) {
1339 jint res = invoke_array_primitive_value_callback(
1340 callbacks()->array_primitive_value_callback,
1341 &wrapper,
1342 obj,
1343 (void*)user_data());
1344 if (check_flags_for_abort(res)) return;
1345 }
1346
1347 // All info for the object is reported.
1348
1349 // If the object has flat fields, report them as heap objects.
1350 if (obj.klass()->is_instance_klass()) {
1351 if (InstanceKlass::cast(obj.klass())->has_inlined_fields()) {
1352 visit_flat_fields(obj);
1353 // check if iteration has been halted
1354 if (is_iteration_aborted()) {
1355 return;
1356 }
1357 }
1358 }
1359 // If the object is flat array, report all elements as heap objects.
1360 if (is_array && obj.obj()->is_flatArray()) {
1361 assert(!obj.is_flat(), "Array object cannot be flattened");
1362 visit_flat_array_elements(obj);
1363 }
1364 }
1365
1366 void IterateThroughHeapObjectClosure::visit_flat_fields(const JvmtiHeapwalkObject& obj) {
1367 // iterate over instance fields
1368 ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj.klass());
1369 for (int i = 0; i < fields->field_count(); i++) {
1370 ClassFieldDescriptor* field = fields->field_at(i);
1371 // skip non-flat and (for safety) primitive fields
1372 if (!field->is_flat() || is_primitive_field_type(field->field_type())) {
1373 continue;
1374 }
1375
1376 int field_offset = field->field_offset();
1377 if (obj.is_flat()) {
1378 // the object is inlined, its fields are stored without the header
1379 field_offset += obj.offset() - obj.inline_klass()->payload_offset();
1380 }
1381 // check for possible nulls
1382 if (LayoutKindHelper::is_nullable_flat(field->layout_kind())) {
1383 address payload = cast_from_oop<address>(obj.obj()) + field_offset;
1384 if (field->inline_klass()->is_payload_marked_as_null(payload)) {
1385 continue;
1386 }
1387 }
1388 JvmtiHeapwalkObject field_obj(obj.obj(), field_offset, field->inline_klass(), field->layout_kind());
1389
1390 visit_object(field_obj);
1391
1392 // check if iteration has been halted
1393 if (is_iteration_aborted()) {
1394 return;
1395 }
1396 }
1397 }
1398
1399 void IterateThroughHeapObjectClosure::visit_flat_array_elements(const JvmtiHeapwalkObject& obj) {
  assert(!obj.is_flat() && obj.obj()->is_flatArray(), "sanity check");
1401 flatArrayOop array = flatArrayOop(obj.obj());
1402 FlatArrayKlass* fak = array->klass();
1403 InlineKlass* vk = fak->element_klass();
1404 bool need_null_check = LayoutKindHelper::is_nullable_flat(fak->layout_kind());
1405
1406 for (int index = 0; index < array->length(); index++) {
1407 address addr = (address)array->value_at_addr(index, fak->layout_helper());
1408 // check for null
1409 if (need_null_check) {
1410 if (vk->is_payload_marked_as_null(addr)) {
1411 continue;
1412 }
1413 }
1414
1415 // offset in the array oop
1416 int offset = (int)(addr - cast_from_oop<address>(array));
1417 JvmtiHeapwalkObject elem(obj.obj(), offset, vk, fak->layout_kind());
1418
1419 visit_object(elem);
1420
1421 // check if iteration has been halted
1422 if (is_iteration_aborted()) {
1423 return;
1424 }
1425 }
1426 }
1427
1428 // Deprecated function to iterate over all objects in the heap
1429 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
1430 Klass* klass,
1431 jvmtiHeapObjectCallback heap_object_callback,
1432 const void* user_data)
1433 {
1434 // EA based optimizations on tagged objects are already reverted.
1435 EscapeBarrier eb(object_filter == JVMTI_HEAP_OBJECT_UNTAGGED ||
1436 object_filter == JVMTI_HEAP_OBJECT_EITHER,
1437 JavaThread::current());
1438 eb.deoptimize_objects_all_threads();
1439 Arena dead_object_arena(mtServiceability);
  GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
1441 {
1442 MutexLocker ml(Heap_lock);
1443 IterateOverHeapObjectClosure blk(this,
1444 klass,
1445 object_filter,
1446 heap_object_callback,
1447 user_data);
1448 VM_HeapIterateOperation op(&blk, &dead_objects);
1449 VMThread::execute(&op);
1450 }
1451 convert_flat_object_entries();
1452
1453 // Post events outside of Heap_lock
1454 post_dead_objects(&dead_objects);
1455 }
1456
1457
1458 // Iterates over all objects in the heap
1459 void JvmtiTagMap::iterate_through_heap(jint heap_filter,
1460 Klass* klass,
1461 const jvmtiHeapCallbacks* callbacks,
1462 const void* user_data)
1463 {
1464 // EA based optimizations on tagged objects are already reverted.
1465 EscapeBarrier eb(!(heap_filter & JVMTI_HEAP_FILTER_UNTAGGED), JavaThread::current());
1466 eb.deoptimize_objects_all_threads();
1467
1468 Arena dead_object_arena(mtServiceability);
1469 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
1470 {
1471 MutexLocker ml(Heap_lock);
1472 IterateThroughHeapObjectClosure blk(this,
1473 klass,
1474 heap_filter,
1475 callbacks,
1476 user_data);
1477 VM_HeapIterateOperation op(&blk, &dead_objects);
1478 VMThread::execute(&op);
1479 }
1480 convert_flat_object_entries();
1481
1482 // Post events outside of Heap_lock
1483 post_dead_objects(&dead_objects);
1484 }
1485
1486 void JvmtiTagMap::remove_dead_entries_locked(GrowableArray<jlong>* objects) {
1487 assert(is_locked(), "precondition");
1488 if (_needs_cleaning) {
1489 // Recheck whether to post object free events under the lock.
1490 if (!env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
1491 objects = nullptr;
1492 }
1493 log_info(jvmti, table)("TagMap table needs cleaning%s",
1494 ((objects != nullptr) ? " and posting" : ""));
1495 _hashmap->remove_dead_entries(objects);
1496 _needs_cleaning = false;
1497 }
1498 }
1499
1500 void JvmtiTagMap::remove_dead_entries(GrowableArray<jlong>* objects) {
1501 MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
1502 remove_dead_entries_locked(objects);
1503 }
1504
1505 void JvmtiTagMap::post_dead_objects(GrowableArray<jlong>* const objects) {
1506 assert(Thread::current()->is_Java_thread(), "Must post from JavaThread");
1507 if (objects != nullptr && objects->length() > 0) {
1508 JvmtiExport::post_object_free(env(), objects);
    log_info(jvmti, table)("%d free objects posted", objects->length());
1510 }
1511 }
1512
1513 void JvmtiTagMap::remove_and_post_dead_objects() {
1514 ResourceMark rm;
1515 GrowableArray<jlong> objects;
1516 remove_dead_entries(&objects);
1517 post_dead_objects(&objects);
1518 }
1519
1520 void JvmtiTagMap::flush_object_free_events() {
1521 assert_not_at_safepoint();
1522 if (env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
1523 {
1524 // The other thread can block for safepoints during event callbacks, so ensure we
1525 // are safepoint-safe while waiting.
1526 ThreadBlockInVM tbivm(JavaThread::current());
1527 MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
1528 while (_posting_events) {
1529 ml.wait();
1530 }
1531
1532 if (!_needs_cleaning || is_empty()) {
1533 _needs_cleaning = false;
1534 return;
1535 }
1536 _posting_events = true;
1537 } // Drop the lock so we can do the cleaning on the VM thread.
    // Needs both cleaning and event posting (unless some other thread
    // got there first after we dropped the lock).
1540 remove_and_post_dead_objects();
1541 {
1542 MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
1543 _posting_events = false;
1544 ml.notify_all();
1545 }
1546 } else {
1547 remove_dead_entries(nullptr);
1548 }
1549 }
1550
1551 // support class for get_objects_with_tags
1552
1553 class TagObjectCollector : public JvmtiTagMapKeyClosure {
1554 private:
1555 JvmtiEnv* _env;
1556 JavaThread* _thread;
1557 jlong* _tags;
1558 jint _tag_count;
1559 bool _some_dead_found;
1560
1561 GrowableArray<jobject>* _object_results; // collected objects (JNI weak refs)
1562 GrowableArray<uint64_t>* _tag_results; // collected tags
1563
1564 public:
1565 TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) :
1566 _env(env),
1567 _thread(JavaThread::current()),
1568 _tags((jlong*)tags),
1569 _tag_count(tag_count),
1570 _some_dead_found(false),
1571 _object_results(new (mtServiceability) GrowableArray<jobject>(1, mtServiceability)),
1572 _tag_results(new (mtServiceability) GrowableArray<uint64_t>(1, mtServiceability)) { }
1573
1574 ~TagObjectCollector() {
1575 delete _object_results;
1576 delete _tag_results;
1577 }
1578
1579 bool some_dead_found() const { return _some_dead_found; }
1580
1581 // for each tagged object check if the tag value matches
1582 // - if it matches then we create a JNI local reference to the object
1583 // and record the reference and tag value.
1584 // Always return true so the iteration continues.
1585 bool do_entry(JvmtiTagMapKey& key, jlong& value) {
1586 for (int i = 0; i < _tag_count; i++) {
1587 if (_tags[i] == value) {
1588 // The reference in this tag map could be the only (implicitly weak)
1589 // reference to that object. If we hand it out, we need to keep it live wrt
1590 // SATB marking similar to other j.l.ref.Reference referents. This is
1591 // achieved by using a phantom load in the object() accessor.
1592 oop o = key.object();
1593 if (o == nullptr) {
1594 _some_dead_found = true;
1595 // skip this whole entry
1596 return true;
1597 }
1598 assert(o != nullptr && Universe::heap()->is_in(o), "sanity check");
1599 jobject ref = JNIHandles::make_local(_thread, o);
1600 _object_results->append(ref);
1601 _tag_results->append(value);
1602 }
1603 }
1604 return true;
1605 }
1606
1607 // return the results from the collection
1608 //
1609 jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1610 jvmtiError error;
1611 int count = _object_results->length();
1612 assert(count >= 0, "sanity check");
1613
1614 // if object_result_ptr is not null then allocate the result and copy
1615 // in the object references.
1616 if (object_result_ptr != nullptr) {
1617 error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
1618 if (error != JVMTI_ERROR_NONE) {
1619 return error;
1620 }
1621 for (int i=0; i<count; i++) {
1622 (*object_result_ptr)[i] = _object_results->at(i);
1623 }
1624 }
1625
1626 // if tag_result_ptr is not null then allocate the result and copy
1627 // in the tag values.
1628 if (tag_result_ptr != nullptr) {
1629 error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
1630 if (error != JVMTI_ERROR_NONE) {
1631 if (object_result_ptr != nullptr) {
          _env->Deallocate((unsigned char*)*object_result_ptr);
1633 }
1634 return error;
1635 }
1636 for (int i=0; i<count; i++) {
1637 (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
1638 }
1639 }
1640
1641 *count_ptr = count;
1642 return JVMTI_ERROR_NONE;
1643 }
1644 };
1645
1646 // return the list of objects with the specified tags
1647 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
1648 jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1649
1650 // ensure flat object conversion is completed
1651 convert_flat_object_entries();
1652
1653 TagObjectCollector collector(env(), tags, count);
1654 {
1655 // iterate over all tagged objects
1656 MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
1657 // Can't post ObjectFree events here from a JavaThread, so this
1658 // will race with the gc_notification thread in the tiny
1659 // window where the object is not marked but hasn't been notified that
1660 // it is collected yet.
1661 _hashmap->entry_iterate(&collector);
1662 }
1663 return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
1664 }
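// Illustrative agent-side sketch (not part of this file): the function above
// backs the JVM TI GetObjectsWithTags entry point. An agent that tagged
// objects earlier with SetTag can retrieve them like this, releasing the
// returned arrays with Deallocate when done:
//
//   jlong tag = 42;
//   jint count;
//   jobject* objects;
//   jlong* tags;
//   if (jvmti->GetObjectsWithTags(1, &tag, &count, &objects, &tags) == JVMTI_ERROR_NONE) {
//     // use objects[0..count-1] and tags[0..count-1]
//     jvmti->Deallocate((unsigned char*)objects);
//     jvmti->Deallocate((unsigned char*)tags);
//   }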
1665
1666 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
1667 // (not performance critical as only used for roots)
1668 static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
1669 switch (kind) {
1670 case JVMTI_HEAP_REFERENCE_JNI_GLOBAL: return JVMTI_HEAP_ROOT_JNI_GLOBAL;
1671 case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
1672 case JVMTI_HEAP_REFERENCE_STACK_LOCAL: return JVMTI_HEAP_ROOT_STACK_LOCAL;
1673 case JVMTI_HEAP_REFERENCE_JNI_LOCAL: return JVMTI_HEAP_ROOT_JNI_LOCAL;
1674 case JVMTI_HEAP_REFERENCE_THREAD: return JVMTI_HEAP_ROOT_THREAD;
1675 case JVMTI_HEAP_REFERENCE_OTHER: return JVMTI_HEAP_ROOT_OTHER;
1676 default: ShouldNotReachHere(); return JVMTI_HEAP_ROOT_OTHER;
1677 }
1678 }
1679
1680 // Base class for all heap walk contexts. The base class maintains a flag
1681 // to indicate if the context is valid or not.
1682 class HeapWalkContext {
1683 private:
1684 bool _valid;
1685 public:
1686 HeapWalkContext(bool valid) { _valid = valid; }
1687 void invalidate() { _valid = false; }
1688 bool is_valid() const { return _valid; }
1689 };
1690
1691 // A basic heap walk context for the deprecated heap walking functions.
// The context for a basic heap walk consists of the callbacks and the fields
// used by the referrer caching scheme.
1694 class BasicHeapWalkContext: public HeapWalkContext {
1695 private:
1696 jvmtiHeapRootCallback _heap_root_callback;
1697 jvmtiStackReferenceCallback _stack_ref_callback;
1698 jvmtiObjectReferenceCallback _object_ref_callback;
1699
1700 // used for caching
1701 JvmtiHeapwalkObject _last_referrer;
1702 jlong _last_referrer_tag;
1703
1704 public:
1705 BasicHeapWalkContext() : HeapWalkContext(false) { }
1706
1707 BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
1708 jvmtiStackReferenceCallback stack_ref_callback,
1709 jvmtiObjectReferenceCallback object_ref_callback) :
1710 HeapWalkContext(true),
1711 _heap_root_callback(heap_root_callback),
1712 _stack_ref_callback(stack_ref_callback),
1713 _object_ref_callback(object_ref_callback),
1714 _last_referrer(),
1715 _last_referrer_tag(0) {
1716 }
1717
1718 // accessors
1719 jvmtiHeapRootCallback heap_root_callback() const { return _heap_root_callback; }
1720 jvmtiStackReferenceCallback stack_ref_callback() const { return _stack_ref_callback; }
1721 jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }
1722
1723 JvmtiHeapwalkObject last_referrer() const { return _last_referrer; }
1724 void set_last_referrer(const JvmtiHeapwalkObject& referrer) { _last_referrer = referrer; }
1725 jlong last_referrer_tag() const { return _last_referrer_tag; }
1726 void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
1727 };
1728
1729 // The advanced heap walk context for the FollowReferences functions.
// The context consists of the callbacks and the fields used for filtering.
1731 class AdvancedHeapWalkContext: public HeapWalkContext {
1732 private:
1733 jint _heap_filter;
1734 Klass* _klass_filter;
1735 const jvmtiHeapCallbacks* _heap_callbacks;
1736
1737 public:
1738 AdvancedHeapWalkContext() : HeapWalkContext(false) { }
1739
1740 AdvancedHeapWalkContext(jint heap_filter,
1741 Klass* klass_filter,
1742 const jvmtiHeapCallbacks* heap_callbacks) :
1743 HeapWalkContext(true),
1744 _heap_filter(heap_filter),
1745 _klass_filter(klass_filter),
1746 _heap_callbacks(heap_callbacks) {
1747 }
1748
1749 // accessors
1750 jint heap_filter() const { return _heap_filter; }
1751 Klass* klass_filter() const { return _klass_filter; }
1752
1753 jvmtiHeapReferenceCallback heap_reference_callback() const {
1754 return _heap_callbacks->heap_reference_callback;
1755 };
1756 jvmtiPrimitiveFieldCallback primitive_field_callback() const {
1757 return _heap_callbacks->primitive_field_callback;
1758 }
1759 jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
1760 return _heap_callbacks->array_primitive_value_callback;
1761 }
1762 jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
1763 return _heap_callbacks->string_primitive_value_callback;
1764 }
1765 };
1766
1767 // The CallbackInvoker is a class with static functions that the heap walk can call
1768 // into to invoke callbacks. It works in one of two modes. The "basic" mode is
1769 // used for the deprecated IterateOverReachableObjects functions. The "advanced"
// mode is for the newer FollowReferences function, which supports a number of
// additional callbacks.
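// As an agent-side illustration only (names are hypothetical, not part of this
// file), the advanced mode ends up calling a function the agent registered in
// jvmtiHeapCallbacks, following the jvmtiHeapReferenceCallback typedef:
//
//   static jint JNICALL heap_reference_cb(jvmtiHeapReferenceKind kind,
//                                         const jvmtiHeapReferenceInfo* info,
//                                         jlong class_tag, jlong referrer_class_tag,
//                                         jlong size, jlong* tag_ptr,
//                                         jlong* referrer_tag_ptr, jint length,
//                                         void* user_data) {
//     if (*tag_ptr == 0) {
//       *tag_ptr = 1;               // tag objects as they are discovered
//     }
//     return JVMTI_VISIT_OBJECTS;   // keep following references from this object
//   }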
1772 class CallbackInvoker : AllStatic {
1773 private:
1774 // heap walk styles
1775 enum { basic, advanced };
1776 static int _heap_walk_type;
1777 static bool is_basic_heap_walk() { return _heap_walk_type == basic; }
1778 static bool is_advanced_heap_walk() { return _heap_walk_type == advanced; }
1779
1780 // context for basic style heap walk
1781 static BasicHeapWalkContext _basic_context;
1782 static BasicHeapWalkContext* basic_context() {
1783 assert(_basic_context.is_valid(), "invalid");
1784 return &_basic_context;
1785 }
1786
1787 // context for advanced style heap walk
1788 static AdvancedHeapWalkContext _advanced_context;
1789 static AdvancedHeapWalkContext* advanced_context() {
1790 assert(_advanced_context.is_valid(), "invalid");
1791 return &_advanced_context;
1792 }
1793
1794 // context needed for all heap walks
1795 static JvmtiTagMap* _tag_map;
1796 static const void* _user_data;
1797 static JvmtiHeapwalkVisitStack* _visit_stack;
1798
1799 // accessors
1800 static JvmtiTagMap* tag_map() { return _tag_map; }
1801 static const void* user_data() { return _user_data; }
1802 static JvmtiHeapwalkVisitStack* visit_stack() { return _visit_stack; }
1803
1804 // if the object hasn't been visited then push it onto the visit stack
1805 // so that it will be visited later
  static inline bool check_for_visit(const JvmtiHeapwalkObject& obj) {
1807 visit_stack()->check_for_visit(obj);
1808 return true;
1809 }
1810
  // return the element count if obj is an array, -1 otherwise
1812 static jint get_array_length(const JvmtiHeapwalkObject& obj) {
1813 if (!obj.klass()->is_array_klass()) {
1814 return -1;
1815 }
1816 assert(!obj.is_flat(), "array cannot be flat");
1817 return (jint)arrayOop(obj.obj())->length();
1818 }
1819
1820 // invoke basic style callbacks
1821 static inline bool invoke_basic_heap_root_callback
1822 (jvmtiHeapRootKind root_kind, const JvmtiHeapwalkObject& obj);
1823 static inline bool invoke_basic_stack_ref_callback
1824 (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
1825 int slot, const JvmtiHeapwalkObject& obj);
1826 static inline bool invoke_basic_object_reference_callback
1827 (jvmtiObjectReferenceKind ref_kind, const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint index);
1828
1829 // invoke advanced style callbacks
1830 static inline bool invoke_advanced_heap_root_callback
1831 (jvmtiHeapReferenceKind ref_kind, const JvmtiHeapwalkObject& obj);
1832 static inline bool invoke_advanced_stack_ref_callback
1833 (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
1834 jmethodID method, jlocation bci, jint slot, const JvmtiHeapwalkObject& obj);
1835 static inline bool invoke_advanced_object_reference_callback
1836 (jvmtiHeapReferenceKind ref_kind, const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint index);
1837
1838 // used to report the value of primitive fields
1839 static inline bool report_primitive_field
1840 (jvmtiHeapReferenceKind ref_kind, const JvmtiHeapwalkObject& obj, jint index, address addr, char type);
1841
1842 public:
1843 // initialize for basic mode
1844 static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1845 const void* user_data,
1846 BasicHeapWalkContext context,
1847 JvmtiHeapwalkVisitStack* visit_stack);
1848
1849 // initialize for advanced mode
1850 static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1851 const void* user_data,
1852 AdvancedHeapWalkContext context,
1853 JvmtiHeapwalkVisitStack* visit_stack);
1854
1855 // functions to report roots
1856 static inline bool report_simple_root(jvmtiHeapReferenceKind kind, const JvmtiHeapwalkObject& o);
1857 static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
1858 jmethodID m, const JvmtiHeapwalkObject& o);
1859 static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
1860 jmethodID method, jlocation bci, jint slot, const JvmtiHeapwalkObject& o);
1861
1862 // functions to report references
1863 static inline bool report_array_element_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint index);
1864 static inline bool report_class_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree);
1865 static inline bool report_class_loader_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree);
1866 static inline bool report_signers_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree);
1867 static inline bool report_protection_domain_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree);
1868 static inline bool report_superclass_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree);
1869 static inline bool report_interface_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree);
1870 static inline bool report_static_field_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint slot);
1871 static inline bool report_field_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint slot);
1872 static inline bool report_constant_pool_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint index);
1873 static inline bool report_primitive_array_values(const JvmtiHeapwalkObject& array);
1874 static inline bool report_string_value(const JvmtiHeapwalkObject& str);
1875 static inline bool report_primitive_instance_field(const JvmtiHeapwalkObject& o, jint index, address value, char type);
1876 static inline bool report_primitive_static_field(const JvmtiHeapwalkObject& o, jint index, address value, char type);
1877 };
1878
1879 // statics
1880 int CallbackInvoker::_heap_walk_type;
1881 BasicHeapWalkContext CallbackInvoker::_basic_context;
1882 AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
1883 JvmtiTagMap* CallbackInvoker::_tag_map;
1884 const void* CallbackInvoker::_user_data;
1885 JvmtiHeapwalkVisitStack* CallbackInvoker::_visit_stack;
1886
1887 // initialize for basic heap walk (IterateOverReachableObjects et al)
1888 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1889 const void* user_data,
1890 BasicHeapWalkContext context,
1891 JvmtiHeapwalkVisitStack* visit_stack) {
1892 _tag_map = tag_map;
1893 _user_data = user_data;
1894 _basic_context = context;
1895 _advanced_context.invalidate(); // will trigger assertion if used
1896 _heap_walk_type = basic;
1897 _visit_stack = visit_stack;
1898 }
1899
1900 // initialize for advanced heap walk (FollowReferences)
1901 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1902 const void* user_data,
1903 AdvancedHeapWalkContext context,
1904 JvmtiHeapwalkVisitStack* visit_stack) {
1905 _tag_map = tag_map;
1906 _user_data = user_data;
1907 _advanced_context = context;
1908 _basic_context.invalidate(); // will trigger assertion if used
1909 _heap_walk_type = advanced;
1910 _visit_stack = visit_stack;
1911 }
1912
1913
1914 // invoke basic style heap root callback
1915 inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, const JvmtiHeapwalkObject& obj) {
  // check that the heap root callback is provided
1917 jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
1918 if (cb == nullptr) {
1919 return check_for_visit(obj);
1920 }
1921
1922 CallbackWrapper wrapper(tag_map(), obj);
1923 jvmtiIterationControl control = (*cb)(root_kind,
1924 wrapper.klass_tag(),
1925 wrapper.obj_size(),
1926 wrapper.obj_tag_p(),
1927 (void*)user_data());
1928 // push root to visit stack when following references
1929 if (control == JVMTI_ITERATION_CONTINUE &&
1930 basic_context()->object_ref_callback() != nullptr) {
1931 visit_stack()->push(obj);
1932 }
1933 return control != JVMTI_ITERATION_ABORT;
1934 }
1935
1936 // invoke basic style stack ref callback
1937 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
1938 jlong thread_tag,
1939 jint depth,
1940 jmethodID method,
1941 int slot,
1942 const JvmtiHeapwalkObject& obj) {
  // check that the stack reference callback is provided
1944 jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
1945 if (cb == nullptr) {
1946 return check_for_visit(obj);
1947 }
1948
1949 CallbackWrapper wrapper(tag_map(), obj);
1950 jvmtiIterationControl control = (*cb)(root_kind,
1951 wrapper.klass_tag(),
1952 wrapper.obj_size(),
1953 wrapper.obj_tag_p(),
1954 thread_tag,
1955 depth,
1956 method,
1957 slot,
1958 (void*)user_data());
1959 // push root to visit stack when following references
1960 if (control == JVMTI_ITERATION_CONTINUE &&
1961 basic_context()->object_ref_callback() != nullptr) {
1962 visit_stack()->push(obj);
1963 }
1964 return control != JVMTI_ITERATION_ABORT;
1965 }
1966
1967 // invoke basic style object reference callback
1968 inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
1969 const JvmtiHeapwalkObject& referrer,
1970 const JvmtiHeapwalkObject& referree,
1971 jint index) {
1972
1973 BasicHeapWalkContext* context = basic_context();
1974
1975 // callback requires the referrer's tag. If it's the same referrer
1976 // as the last call then we use the cached value.
1977 jlong referrer_tag;
1978 if (referrer == context->last_referrer()) {
1979 referrer_tag = context->last_referrer_tag();
1980 } else {
1981 referrer_tag = tag_map()->find(referrer);
1982 }
1983
1984 // do the callback
1985 CallbackWrapper wrapper(tag_map(), referree);
1986 jvmtiObjectReferenceCallback cb = context->object_ref_callback();
1987 jvmtiIterationControl control = (*cb)(ref_kind,
1988 wrapper.klass_tag(),
1989 wrapper.obj_size(),
1990 wrapper.obj_tag_p(),
1991 referrer_tag,
1992 index,
1993 (void*)user_data());
1994
1995 // record referrer and referrer tag. For self-references record the
1996 // tag value from the callback as this might differ from referrer_tag.
1997 context->set_last_referrer(referrer);
1998 if (referrer == referree) {
1999 context->set_last_referrer_tag(*wrapper.obj_tag_p());
2000 } else {
2001 context->set_last_referrer_tag(referrer_tag);
2002 }
2003
2004 if (control == JVMTI_ITERATION_CONTINUE) {
2005 return check_for_visit(referree);
2006 } else {
2007 return control != JVMTI_ITERATION_ABORT;
2008 }
2009 }
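// For reference (agent-side, illustrative only): the callback invoked above has
// the deprecated jvmtiObjectReferenceCallback shape, e.g.
//
//   static jvmtiIterationControl JNICALL object_ref_cb(jvmtiObjectReferenceKind kind,
//                                                      jlong class_tag, jlong size,
//                                                      jlong* tag_ptr, jlong referrer_tag,
//                                                      jint referrer_index, void* user_data) {
//     return JVMTI_ITERATION_CONTINUE;
//   }
//
// The referrer tag cache pays off because an object's outgoing references are
// typically reported back-to-back while that object is being iterated, so the
// same referrer is seen on consecutive calls. (object_ref_cb is a hypothetical name.)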
2010
2011 // invoke advanced style heap root callback
2012 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
2013 const JvmtiHeapwalkObject& obj) {
2014 AdvancedHeapWalkContext* context = advanced_context();
2015
2016 // check that callback is provided
2017 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2018 if (cb == nullptr) {
2019 return check_for_visit(obj);
2020 }
2021
2022 // apply class filter
2023 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2024 return check_for_visit(obj);
2025 }
2026
2027 // setup the callback wrapper
2028 CallbackWrapper wrapper(tag_map(), obj);
2029
2030 // apply tag filter
2031 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2032 wrapper.klass_tag(),
2033 context->heap_filter())) {
2034 return check_for_visit(obj);
2035 }
2036
2037 // for arrays we need the length, otherwise -1
2038 jint len = get_array_length(obj);
2039
2040 // invoke the callback
2041 jint res = (*cb)(ref_kind,
2042 nullptr, // referrer info
2043 wrapper.klass_tag(),
2044 0, // referrer_class_tag is 0 for heap root
2045 wrapper.obj_size(),
2046 wrapper.obj_tag_p(),
2047 nullptr, // referrer_tag_p
2048 len,
2049 (void*)user_data());
2050 if (res & JVMTI_VISIT_ABORT) {
    return false;
2052 }
2053 if (res & JVMTI_VISIT_OBJECTS) {
2054 check_for_visit(obj);
2055 }
2056 return true;
2057 }
2058
2059 // report a reference from a thread stack to an object
2060 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
2061 jlong thread_tag,
2062 jlong tid,
2063 int depth,
2064 jmethodID method,
2065 jlocation bci,
2066 jint slot,
2067 const JvmtiHeapwalkObject& obj) {
2068 AdvancedHeapWalkContext* context = advanced_context();
2069
  // check that the callback is provided
2071 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2072 if (cb == nullptr) {
2073 return check_for_visit(obj);
2074 }
2075
2076 // apply class filter
2077 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2078 return check_for_visit(obj);
2079 }
2080
2081 // setup the callback wrapper
2082 CallbackWrapper wrapper(tag_map(), obj);
2083
2084 // apply tag filter
2085 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2086 wrapper.klass_tag(),
2087 context->heap_filter())) {
2088 return check_for_visit(obj);
2089 }
2090
2091 // setup the referrer info
2092 jvmtiHeapReferenceInfo reference_info;
2093 reference_info.stack_local.thread_tag = thread_tag;
2094 reference_info.stack_local.thread_id = tid;
2095 reference_info.stack_local.depth = depth;
2096 reference_info.stack_local.method = method;
2097 reference_info.stack_local.location = bci;
2098 reference_info.stack_local.slot = slot;
2099
2100 // for arrays we need the length, otherwise -1
2101 jint len = get_array_length(obj);
2102
2103 // call into the agent
2104 int res = (*cb)(ref_kind,
2105 &reference_info,
2106 wrapper.klass_tag(),
2107 0, // referrer_class_tag is 0 for heap root (stack)
2108 wrapper.obj_size(),
2109 wrapper.obj_tag_p(),
                  nullptr, // referrer_tag_p is null for heap roots
2111 len,
2112 (void*)user_data());
2113
2114 if (res & JVMTI_VISIT_ABORT) {
2115 return false;
2116 }
2117 if (res & JVMTI_VISIT_OBJECTS) {
2118 check_for_visit(obj);
2119 }
2120 return true;
2121 }
2122
2123 // This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
2124 // only for ref_kinds defined by the JVM TI spec. Otherwise, null is passed.
2125 #define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD) \
2126 | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD) \
2127 | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
2128 | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
2129 | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL) \
2130 | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
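// Example: JVMTI_HEAP_REFERENCE_FIELD is in the mask, so a field reference is
// reported with &reference_info (carrying the field index), whereas a kind
// outside the mask, such as JVMTI_HEAP_REFERENCE_CLASS, is reported with a
// null reference_info pointer, as the specification requires.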
2131
2132 // invoke the object reference callback to report a reference
2133 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
2134 const JvmtiHeapwalkObject& referrer,
2135 const JvmtiHeapwalkObject& obj,
2136 jint index)
2137 {
  // the field index is the only valid field in reference_info
2139 static jvmtiHeapReferenceInfo reference_info = { 0 };
2140
2141 AdvancedHeapWalkContext* context = advanced_context();
2142
  // check that the callback is provided
2144 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2145 if (cb == nullptr) {
2146 return check_for_visit(obj);
2147 }
2148
2149 // apply class filter
2150 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2151 return check_for_visit(obj);
2152 }
2153
2154 // setup the callback wrapper
2155 TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
2156
2157 // apply tag filter
2158 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2159 wrapper.klass_tag(),
2160 context->heap_filter())) {
2161 return check_for_visit(obj);
2162 }
2163
2164 // field index is only valid field in reference_info
2165 reference_info.field.index = index;
2166
2167 // for arrays we need the length, otherwise -1
2168 jint len = get_array_length(obj);
2169
2170 // invoke the callback
2171 int res = (*cb)(ref_kind,
2172 (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : nullptr,
2173 wrapper.klass_tag(),
2174 wrapper.referrer_klass_tag(),
2175 wrapper.obj_size(),
2176 wrapper.obj_tag_p(),
2177 wrapper.referrer_tag_p(),
2178 len,
2179 (void*)user_data());
2180
2181 if (res & JVMTI_VISIT_ABORT) {
2182 return false;
2183 }
2184 if (res & JVMTI_VISIT_OBJECTS) {
2185 check_for_visit(obj);
2186 }
2187 return true;
2188 }
2189
2190 // report a "simple root"
2191 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, const JvmtiHeapwalkObject& obj) {
2192 assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
2193 kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
2194
2195 if (is_basic_heap_walk()) {
2196 // map to old style root kind
2197 jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
2198 return invoke_basic_heap_root_callback(root_kind, obj);
2199 } else {
2200 assert(is_advanced_heap_walk(), "wrong heap walk type");
2201 return invoke_advanced_heap_root_callback(kind, obj);
2202 }
2203 }
2204
2205
// invoke the array primitive value callback
2207 inline bool CallbackInvoker::report_primitive_array_values(const JvmtiHeapwalkObject& obj) {
2208 assert(obj.klass()->is_typeArray_klass(), "not a primitive array");
2209
2210 AdvancedHeapWalkContext* context = advanced_context();
2211 assert(context->array_primitive_value_callback() != nullptr, "no callback");
2212
2213 // apply class filter
2214 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2215 return true;
2216 }
2217
2218 CallbackWrapper wrapper(tag_map(), obj);
2219
2220 // apply tag filter
2221 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2222 wrapper.klass_tag(),
2223 context->heap_filter())) {
2224 return true;
2225 }
2226
2227 // invoke the callback
2228 int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
2229 &wrapper,
2230 obj,
2231 (void*)user_data());
2232 return (!(res & JVMTI_VISIT_ABORT));
2233 }
2234
2235 // invoke the string value callback
2236 inline bool CallbackInvoker::report_string_value(const JvmtiHeapwalkObject& str) {
2237 assert(str.klass() == vmClasses::String_klass(), "not a string");
2238
2239 AdvancedHeapWalkContext* context = advanced_context();
2240 assert(context->string_primitive_value_callback() != nullptr, "no callback");
2241
2242 // apply class filter
2243 if (is_filtered_by_klass_filter(str, context->klass_filter())) {
2244 return true;
2245 }
2246
2247 CallbackWrapper wrapper(tag_map(), str);
2248
2249 // apply tag filter
2250 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2251 wrapper.klass_tag(),
2252 context->heap_filter())) {
2253 return true;
2254 }
2255
2256 // invoke the callback
2257 int res = invoke_string_value_callback(context->string_primitive_value_callback(),
2258 &wrapper,
2259 str,
2260 (void*)user_data());
2261 return (!(res & JVMTI_VISIT_ABORT));
2262 }
2263
2264 // invoke the primitive field callback
2265 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
2266 const JvmtiHeapwalkObject& obj,
2267 jint index,
2268 address addr,
2269 char type)
2270 {
2271 // for primitive fields only the index will be set
2272 static jvmtiHeapReferenceInfo reference_info = { 0 };
2273
2274 AdvancedHeapWalkContext* context = advanced_context();
2275 assert(context->primitive_field_callback() != nullptr, "no callback");
2276
2277 // apply class filter
2278 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2279 return true;
2280 }
2281
2282 CallbackWrapper wrapper(tag_map(), obj);
2283
2284 // apply tag filter
2285 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2286 wrapper.klass_tag(),
2287 context->heap_filter())) {
2288 return true;
2289 }
2290
2291 // the field index in the referrer
2292 reference_info.field.index = index;
2293
2294 // map the type
2295 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
2296
2297 // setup the jvalue
2298 jvalue value;
2299 copy_to_jvalue(&value, addr, value_type);
2300
2301 jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
2302 int res = (*cb)(ref_kind,
2303 &reference_info,
2304 wrapper.klass_tag(),
2305 wrapper.obj_tag_p(),
2306 value,
2307 value_type,
2308 (void*)user_data());
2309 return (!(res & JVMTI_VISIT_ABORT));
2310 }
2311
2312
2313 // instance field
2314 inline bool CallbackInvoker::report_primitive_instance_field(const JvmtiHeapwalkObject& obj,
2315 jint index,
2316 address value,
2317 char type) {
2318 return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2319 obj,
2320 index,
2321 value,
2322 type);
2323 }
2324
2325 // static field
2326 inline bool CallbackInvoker::report_primitive_static_field(const JvmtiHeapwalkObject& obj,
2327 jint index,
2328 address value,
2329 char type) {
2330 return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2331 obj,
2332 index,
2333 value,
2334 type);
2335 }
2336
2337 // report a JNI local (root object) to the profiler
2338 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, const JvmtiHeapwalkObject& obj) {
2339 if (is_basic_heap_walk()) {
2340 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
2341 thread_tag,
2342 depth,
2343 m,
2344 -1,
2345 obj);
2346 } else {
2347 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
2348 thread_tag, tid,
2349 depth,
2350 m,
2351 (jlocation)-1,
2352 -1,
2353 obj);
2354 }
2355 }
2356
2357
2358 // report a local (stack reference, root object)
2359 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2360 jlong tid,
2361 jint depth,
2362 jmethodID method,
2363 jlocation bci,
2364 jint slot,
2365 const JvmtiHeapwalkObject& obj) {
2366 if (is_basic_heap_walk()) {
2367 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2368 thread_tag,
2369 depth,
2370 method,
2371 slot,
2372 obj);
2373 } else {
2374 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2375 thread_tag,
2376 tid,
2377 depth,
2378 method,
2379 bci,
2380 slot,
2381 obj);
2382 }
2383 }
2384
2385 // report an object referencing a class.
2386 inline bool CallbackInvoker::report_class_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree) {
2387 if (is_basic_heap_walk()) {
2388 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2389 } else {
2390 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2391 }
2392 }
2393
2394 // report a class referencing its class loader.
2395 inline bool CallbackInvoker::report_class_loader_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree) {
2396 if (is_basic_heap_walk()) {
2397 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2398 } else {
2399 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2400 }
2401 }
2402
2403 // report a class referencing its signers.
2404 inline bool CallbackInvoker::report_signers_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree) {
2405 if (is_basic_heap_walk()) {
2406 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2407 } else {
2408 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2409 }
2410 }
2411
// report a class referencing its protection domain.
2413 inline bool CallbackInvoker::report_protection_domain_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree) {
2414 if (is_basic_heap_walk()) {
2415 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2416 } else {
2417 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2418 }
2419 }
2420
2421 // report a class referencing its superclass.
2422 inline bool CallbackInvoker::report_superclass_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree) {
2423 if (is_basic_heap_walk()) {
2424 // Send this to be consistent with past implementation
2425 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2426 } else {
2427 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2428 }
2429 }
2430
2431 // report a class referencing one of its interfaces.
2432 inline bool CallbackInvoker::report_interface_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree) {
2433 if (is_basic_heap_walk()) {
2434 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2435 } else {
2436 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2437 }
2438 }
2439
2440 // report a class referencing one of its static fields.
2441 inline bool CallbackInvoker::report_static_field_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint slot) {
2442 if (is_basic_heap_walk()) {
2443 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2444 } else {
2445 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2446 }
2447 }
2448
2449 // report an array referencing an element object
2450 inline bool CallbackInvoker::report_array_element_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint index) {
2451 if (is_basic_heap_walk()) {
2452 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2453 } else {
2454 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2455 }
2456 }
2457
2458 // report an object referencing an instance field object
2459 inline bool CallbackInvoker::report_field_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint slot) {
2460 if (is_basic_heap_walk()) {
2461 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2462 } else {
2463 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2464 }
2465 }
2466
// report a class referencing one of its constant pool entries
2468 inline bool CallbackInvoker::report_constant_pool_reference(const JvmtiHeapwalkObject& referrer, const JvmtiHeapwalkObject& referree, jint index) {
2469 if (is_basic_heap_walk()) {
2470 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2471 } else {
2472 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2473 }
2474 }
2475
2476 // A supporting closure used to process simple roots
2477 class SimpleRootsClosure : public OopClosure {
2478 private:
2479 jvmtiHeapReferenceKind _kind;
2480 bool _continue;
2481
2482 jvmtiHeapReferenceKind root_kind() { return _kind; }
2483
2484 public:
2485 void set_kind(jvmtiHeapReferenceKind kind) {
2486 _kind = kind;
2487 _continue = true;
2488 }
2489
2490 inline bool stopped() {
2491 return !_continue;
2492 }
2493
2494 void do_oop(oop* obj_p) {
2495 // iteration has terminated
2496 if (stopped()) {
2497 return;
2498 }
2499
2500 oop o = NativeAccess<AS_NO_KEEPALIVE>::oop_load(obj_p);
2501 // ignore null
2502 if (o == nullptr) {
2503 return;
2504 }
2505
2506 assert(Universe::heap()->is_in(o), "should be impossible");
2507
2508 jvmtiHeapReferenceKind kind = root_kind();
2509
2510 // invoke the callback
2511 _continue = CallbackInvoker::report_simple_root(kind, o);
2512
2513 }
2514 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2515 };
2516
2517 // A supporting closure used to process ClassLoaderData roots.
2518 class CLDRootsClosure: public OopClosure {
2519 private:
2520 bool _continue;
2521 public:
2522 CLDRootsClosure(): _continue(true) {}
2523
2524 inline bool stopped() {
2525 return !_continue;
2526 }
2527
2528 void do_oop(oop* obj_p) {
2529 if (stopped()) {
2530 return;
2531 }
2532
2533 oop o = NativeAccess<AS_NO_KEEPALIVE>::oop_load(obj_p);
2534 // ignore null
2535 if (o == nullptr) {
2536 return;
2537 }
2538
2539 jvmtiHeapReferenceKind kind = JVMTI_HEAP_REFERENCE_OTHER;
2540 if (o->klass() == vmClasses::Class_klass()) {
2541 kind = JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;
2542 }
2543
2544 // invoke the callback
2545 _continue = CallbackInvoker::report_simple_root(kind, o);
2546 }
2547 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2548 };
2549
2550 // A supporting closure used to process JNI locals
2551 class JNILocalRootsClosure : public OopClosure {
2552 private:
2553 jlong _thread_tag;
2554 jlong _tid;
2555 jint _depth;
2556 jmethodID _method;
2557 bool _continue;
2558 public:
2559 void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
2560 _thread_tag = thread_tag;
2561 _tid = tid;
2562 _depth = depth;
2563 _method = method;
2564 _continue = true;
2565 }
2566
2567 inline bool stopped() {
2568 return !_continue;
2569 }
2570
2571 void do_oop(oop* obj_p) {
2572 // iteration has terminated
2573 if (stopped()) {
2574 return;
2575 }
2576
2577 oop o = *obj_p;
2578 // ignore null
2579 if (o == nullptr) {
2580 return;
2581 }
2582
2583 // invoke the callback
2584 _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
2585 }
2586 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2587 };
2588
2589 // Helper class to collect/report stack references.
2590 class StackRefCollector {
2591 private:
2592 JvmtiTagMap* _tag_map;
2593 JNILocalRootsClosure* _blk;
  // java_thread is needed only to report JNI locals on the top native frame;
  // i.e. it's required only for platform/carrier threads or mounted virtual threads.
2596 JavaThread* _java_thread;
2597
2598 oop _threadObj;
2599 jlong _thread_tag;
2600 jlong _tid;
2601
2602 bool _is_top_frame;
2603 int _depth;
2604 frame* _last_entry_frame;
2605
2606 bool report_java_stack_refs(StackValueCollection* values, jmethodID method, jlocation bci, jint slot_offset);
2607 bool report_native_stack_refs(jmethodID method);
2608
2609 public:
2610 StackRefCollector(JvmtiTagMap* tag_map, JNILocalRootsClosure* blk, JavaThread* java_thread)
2611 : _tag_map(tag_map), _blk(blk), _java_thread(java_thread),
2612 _threadObj(nullptr), _thread_tag(0), _tid(0),
2613 _is_top_frame(true), _depth(0), _last_entry_frame(nullptr)
2614 {
2615 }
2616
2617 bool set_thread(oop o);
2618 // Sets the thread and reports the reference to it with the specified kind.
2619 bool set_thread(jvmtiHeapReferenceKind kind, oop o);
2620
2621 bool do_frame(vframe* vf);
2622 // Handles frames until vf->sender() is null.
2623 bool process_frames(vframe* vf);
2624 };
2625
2626 bool StackRefCollector::set_thread(oop o) {
2627 _threadObj = o;
2628 _thread_tag = _tag_map->find(_threadObj);
2629 _tid = java_lang_Thread::thread_id(_threadObj);
2630
2631 _is_top_frame = true;
2632 _depth = 0;
2633 _last_entry_frame = nullptr;
2634
2635 return true;
2636 }
2637
2638 bool StackRefCollector::set_thread(jvmtiHeapReferenceKind kind, oop o) {
2639 return set_thread(o)
2640 && CallbackInvoker::report_simple_root(kind, _threadObj);
2641 }
2642
2643 bool StackRefCollector::report_java_stack_refs(StackValueCollection* values, jmethodID method, jlocation bci, jint slot_offset) {
2644 for (int index = 0; index < values->size(); index++) {
2645 if (values->at(index)->type() == T_OBJECT) {
2646 oop obj = values->obj_at(index)();
2647 if (obj == nullptr) {
2648 continue;
2649 }
2650 // stack reference
2651 if (!CallbackInvoker::report_stack_ref_root(_thread_tag, _tid, _depth, method,
2652 bci, slot_offset + index, obj)) {
2653 return false;
2654 }
2655 }
2656 }
2657 return true;
2658 }
2659
2660 bool StackRefCollector::report_native_stack_refs(jmethodID method) {
2661 _blk->set_context(_thread_tag, _tid, _depth, method);
2662 if (_is_top_frame) {
2663 // JNI locals for the top frame.
2664 if (_java_thread != nullptr) {
2665 _java_thread->active_handles()->oops_do(_blk);
2666 if (_blk->stopped()) {
2667 return false;
2668 }
2669 }
2670 } else {
2671 if (_last_entry_frame != nullptr) {
2672 // JNI locals for the entry frame.
2673 assert(_last_entry_frame->is_entry_frame(), "checking");
2674 _last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(_blk);
2675 if (_blk->stopped()) {
2676 return false;
2677 }
2678 }
2679 }
2680 return true;
2681 }
2682
2683 bool StackRefCollector::do_frame(vframe* vf) {
2684 if (vf->is_java_frame()) {
2685 // java frame (interpreted, compiled, ...)
2686 javaVFrame* jvf = javaVFrame::cast(vf);
2687
2688 jmethodID method = jvf->method()->jmethod_id();
2689
2690 if (!(jvf->method()->is_native())) {
2691 jlocation bci = (jlocation)jvf->bci();
2692 StackValueCollection* locals = jvf->locals();
2693 if (!report_java_stack_refs(locals, method, bci, 0)) {
2694 return false;
2695 }
2696 if (!report_java_stack_refs(jvf->expressions(), method, bci, locals->size())) {
2697 return false;
2698 }
2699
2700 // Follow oops from compiled nmethod.
2701 if (jvf->cb() != nullptr && jvf->cb()->is_nmethod()) {
2702 _blk->set_context(_thread_tag, _tid, _depth, method);
2703 // Need to apply load barriers for unmounted vthreads.
2704 nmethod* nm = jvf->cb()->as_nmethod();
2705 nm->run_nmethod_entry_barrier();
2706 nm->oops_do(_blk);
2707 if (_blk->stopped()) {
2708 return false;
2709 }
2710 }
2711 } else {
2712 // native frame
2713 if (!report_native_stack_refs(method)) {
2714 return false;
2715 }
2716 }
2717 _last_entry_frame = nullptr;
2718 _depth++;
2719 } else {
    // externalVFrame - if this is an entry frame then remember it so the JNI
    // locals can be reported when we reach the corresponding javaVFrame
2722 frame* fr = vf->frame_pointer();
2723 assert(fr != nullptr, "sanity check");
2724 if (fr->is_entry_frame()) {
2725 _last_entry_frame = fr;
2726 }
2727 }
2728
2729 _is_top_frame = false;
2730
2731 return true;
2732 }
2733
2734 bool StackRefCollector::process_frames(vframe* vf) {
2735 while (vf != nullptr) {
2736 if (!do_frame(vf)) {
2737 return false;
2738 }
2739 vf = vf->sender();
2740 }
2741 return true;
2742 }
2743
2744
2745 // A VM operation to iterate over objects that are reachable from
2746 // a set of roots or an initial object.
2747 //
2748 // For VM_HeapWalkOperation the set of roots used is :-
2749 //
2750 // - All JNI global references
2751 // - All inflated monitors
2752 // - All classes loaded by the boot class loader (or all classes
2753 // in the event that class unloading is disabled)
2754 // - All java threads
2755 // - For each java thread then all locals and JNI local references
2756 // on the thread's execution stack
//  - All visible/explainable objects from Universe::oops_do
2758 //
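// Illustrative only (agent code, not part of this file): a walk of this kind is
// typically initiated by an agent call such as
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = heap_reference_cb;  // hypothetical agent callback
//   jvmti->FollowReferences(0 /* heap_filter */, nullptr /* klass */,
//                           nullptr /* initial_object */, &callbacks, user_data);
//
// which queues this VM operation for execution by the VMThread.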
2759 class VM_HeapWalkOperation: public VM_Operation {
2760 private:
2761 bool _is_advanced_heap_walk; // indicates FollowReferences
2762 JvmtiTagMap* _tag_map;
2763 Handle _initial_object;
2764 JvmtiHeapwalkVisitStack _visit_stack;
2765
2766 // Dead object tags in JvmtiTagMap
2767 GrowableArray<jlong>* _dead_objects;
2768
2769 bool _following_object_refs; // are we following object references
2770
2771 bool _reporting_primitive_fields; // optional reporting
2772 bool _reporting_primitive_array_values;
2773 bool _reporting_string_values;
2774
2775 // accessors
2776 bool is_advanced_heap_walk() const { return _is_advanced_heap_walk; }
2777 JvmtiTagMap* tag_map() const { return _tag_map; }
2778 Handle initial_object() const { return _initial_object; }
2779
2780 bool is_following_references() const { return _following_object_refs; }
2781
2782 bool is_reporting_primitive_fields() const { return _reporting_primitive_fields; }
2783 bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
2784 bool is_reporting_string_values() const { return _reporting_string_values; }
2785
2786 JvmtiHeapwalkVisitStack* visit_stack() { return &_visit_stack; }
2787
2788 // iterate over the various object types
2789 inline bool iterate_over_array(const JvmtiHeapwalkObject& o);
2790 inline bool iterate_over_flat_array(const JvmtiHeapwalkObject& o);
2791 inline bool iterate_over_type_array(const JvmtiHeapwalkObject& o);
2792 inline bool iterate_over_class(const JvmtiHeapwalkObject& o);
2793 inline bool iterate_over_object(const JvmtiHeapwalkObject& o);
2794
2795 // root collection
2796 inline bool collect_simple_roots();
2797 inline bool collect_stack_roots();
2798 inline bool collect_stack_refs(JavaThread* java_thread, JNILocalRootsClosure* blk);
2799 inline bool collect_vthread_stack_refs(oop vt);
2800
2801 // visit an object
2802 inline bool visit(const JvmtiHeapwalkObject& o);
2803
2804 public:
2805 VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2806 Handle initial_object,
2807 BasicHeapWalkContext callbacks,
2808 const void* user_data,
2809 GrowableArray<jlong>* objects);
2810
2811 VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2812 Handle initial_object,
2813 AdvancedHeapWalkContext callbacks,
2814 const void* user_data,
2815 GrowableArray<jlong>* objects);
2816
2817 ~VM_HeapWalkOperation();
2818
2819 VMOp_Type type() const { return VMOp_HeapWalkOperation; }
2820 void doit();
2821 };
2822
2823
2824 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2825 Handle initial_object,
2826 BasicHeapWalkContext callbacks,
2827 const void* user_data,
2828 GrowableArray<jlong>* objects) {
2829 _is_advanced_heap_walk = false;
2830 _tag_map = tag_map;
2831 _initial_object = initial_object;
2832 _following_object_refs = (callbacks.object_ref_callback() != nullptr);
2833 _reporting_primitive_fields = false;
2834 _reporting_primitive_array_values = false;
2835 _reporting_string_values = false;
2836 _dead_objects = objects;
2837 CallbackInvoker::initialize_for_basic_heap_walk(tag_map, user_data, callbacks, &_visit_stack);
2838 }
2839
2840 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2841 Handle initial_object,
2842 AdvancedHeapWalkContext callbacks,
2843 const void* user_data,
2844 GrowableArray<jlong>* objects) {
2845 _is_advanced_heap_walk = true;
2846 _tag_map = tag_map;
2847 _initial_object = initial_object;
2848 _following_object_refs = true;
  _reporting_primitive_fields = (callbacks.primitive_field_callback() != nullptr);
  _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != nullptr);
  _reporting_string_values = (callbacks.string_primitive_value_callback() != nullptr);
2852 _dead_objects = objects;
2853 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, user_data, callbacks, &_visit_stack);
2854 }
2855
2856 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2857 }
2858
2859 // an array references its class and has a reference to
2860 // each element in the array
2861 inline bool VM_HeapWalkOperation::iterate_over_array(const JvmtiHeapwalkObject& o) {
2862 assert(!o.is_flat(), "Array object cannot be flattened");
2863 refArrayOop array = oop_cast<refArrayOop>(o.obj());
2864
2865 // array reference to its class
2866 oop mirror = array->klass()->java_mirror();
2867 if (!CallbackInvoker::report_class_reference(o, mirror)) {
2868 return false;
2869 }
2870
2871 // iterate over the array and report each reference to a
2872 // non-null element
2873 for (int index=0; index<array->length(); index++) {
2874 oop elem = array->obj_at(index);
2875 if (elem == nullptr) {
2876 continue;
2877 }
2878
2879 // report the array reference o[index] = elem
2880 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2881 return false;
2882 }
2883 }
2884 return true;
2885 }
2886
// similar to iterate_over_array(), but iterates over a flat array
2888 inline bool VM_HeapWalkOperation::iterate_over_flat_array(const JvmtiHeapwalkObject& o) {
2889 assert(!o.is_flat(), "Array object cannot be flattened");
2890 flatArrayOop array = flatArrayOop(o.obj());
  FlatArrayKlass* fak = FlatArrayKlass::cast(array->klass());
2892 InlineKlass* vk = fak->element_klass();
2893 bool need_null_check = LayoutKindHelper::is_nullable_flat(fak->layout_kind());
2894
2895 // array reference to its class
2896 oop mirror = fak->java_mirror();
2897 if (!CallbackInvoker::report_class_reference(o, mirror)) {
2898 return false;
2899 }
2900
2901 // iterate over the array and report each reference to a
2902 // non-null element
2903 for (int index = 0; index < array->length(); index++) {
2904 address addr = (address)array->value_at_addr(index, fak->layout_helper());
2905
2906 // check for null
2907 if (need_null_check) {
2908 if (vk->is_payload_marked_as_null(addr)) {
2909 continue;
2910 }
2911 }
2912
    // offset of the element payload relative to the array oop's base address
2914 int offset = (int)(addr - cast_from_oop<address>(array));
2915 JvmtiHeapwalkObject elem(o.obj(), offset, vk, fak->layout_kind());
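    // Note: a flat element is not a standalone oop, so it is represented as the
    // holder array plus the payload offset (JvmtiHeapwalkObject with offset,
    // InlineKlass and layout kind), matching how flat instance fields are
    // handled in iterate_over_object().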
2916
2917 // report the array reference
2918 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2919 return false;
2920 }
2921 }
2922 return true;
2923 }
2924
2925 // a type array references its class
2926 inline bool VM_HeapWalkOperation::iterate_over_type_array(const JvmtiHeapwalkObject& o) {
2927 assert(!o.is_flat(), "Array object cannot be flattened");
2928 Klass* k = o.klass();
2929 oop mirror = k->java_mirror();
2930 if (!CallbackInvoker::report_class_reference(o, mirror)) {
2931 return false;
2932 }
2933
2934 // report the array contents if required
2935 if (is_reporting_primitive_array_values()) {
2936 if (!CallbackInvoker::report_primitive_array_values(o)) {
2937 return false;
2938 }
2939 }
2940 return true;
2941 }
2942
2943 #ifdef ASSERT
2944 // verify that a static oop field is in range
2945 static inline bool verify_static_oop(InstanceKlass* ik,
2946 oop mirror, int offset) {
2947 address obj_p = cast_from_oop<address>(mirror) + offset;
2948 address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
2949 address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
2950 assert(end >= start, "sanity check");
2951
2952 if (obj_p >= start && obj_p < end) {
2953 return true;
2954 } else {
2955 return false;
2956 }
2957 }
2958 #endif // #ifdef ASSERT
2959
2960 // a class references its super class, interfaces, class loader, ...
2961 // and finally its static fields
2962 inline bool VM_HeapWalkOperation::iterate_over_class(const JvmtiHeapwalkObject& o) {
2963 assert(!o.is_flat(), "Klass object cannot be flattened");
2964 Klass* klass = java_lang_Class::as_Klass(o.obj());
2965 int i;
2966
2967 if (klass->is_instance_klass()) {
2968 InstanceKlass* ik = InstanceKlass::cast(klass);
2969
    // Ignore the class if it hasn't been linked yet
2971 if (!ik->is_linked()) {
2972 return true;
2973 }
2974
2975 // get the java mirror
2976 oop mirror_oop = klass->java_mirror();
2977 JvmtiHeapwalkObject mirror(mirror_oop);
2978
2979 // super (only if something more interesting than java.lang.Object)
    InstanceKlass* super_klass = ik->java_super();
2981 if (super_klass != nullptr && super_klass != vmClasses::Object_klass()) {
2982 oop super_oop = super_klass->java_mirror();
2983 if (!CallbackInvoker::report_superclass_reference(mirror, super_oop)) {
2984 return false;
2985 }
2986 }
2987
2988 // class loader
2989 oop cl = ik->class_loader();
2990 if (cl != nullptr) {
2991 if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
2992 return false;
2993 }
2994 }
2995
2996 // protection domain
2997 oop pd = ik->protection_domain();
2998 if (pd != nullptr) {
2999 if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
3000 return false;
3001 }
3002 }
3003
3004 // signers
3005 oop signers = ik->signers();
3006 if (signers != nullptr) {
3007 if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
3008 return false;
3009 }
3010 }
3011
3012 // references from the constant pool
3013 {
3014 ConstantPool* pool = ik->constants();
3015 for (int i = 1; i < pool->length(); i++) {
3016 constantTag tag = pool->tag_at(i).value();
3017 if (tag.is_string() || tag.is_klass() || tag.is_unresolved_klass()) {
3018 oop entry;
3019 if (tag.is_string()) {
3020 entry = pool->resolved_string_at(i);
3021 // If the entry is non-null it is resolved.
3022 if (entry == nullptr) {
3023 continue;
3024 }
3025 } else if (tag.is_klass()) {
3026 entry = pool->resolved_klass_at(i)->java_mirror();
3027 } else {
3028 // Code generated by JIT compilers might not resolve constant
3029 // pool entries. Treat them as resolved if they are loaded.
3030 assert(tag.is_unresolved_klass(), "must be");
3031 constantPoolHandle cp(Thread::current(), pool);
3032 Klass* klass = ConstantPool::klass_at_if_loaded(cp, i);
3033 if (klass == nullptr) {
3034 continue;
3035 }
3036 entry = klass->java_mirror();
3037 }
3038 if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
3039 return false;
3040 }
3041 }
3042 }
3043 }
3044
3045 // interfaces
3046 // (These will already have been reported as references from the constant pool
3047 // but are specified by IterateOverReachableObjects and must be reported).
3048 Array<InstanceKlass*>* interfaces = ik->local_interfaces();
3049 for (i = 0; i < interfaces->length(); i++) {
3050 oop interf = interfaces->at(i)->java_mirror();
3051 if (interf == nullptr) {
3052 continue;
3053 }
3054 if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
3055 return false;
3056 }
3057 }
3058
3059 // iterate over the static fields
3060
3061 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
3062 for (i=0; i<field_map->field_count(); i++) {
3063 ClassFieldDescriptor* field = field_map->field_at(i);
3064 char type = field->field_type();
3065 if (!is_primitive_field_type(type)) {
3066 oop fld_o = mirror_oop->obj_field(field->field_offset());
3067 assert(verify_static_oop(ik, mirror_oop, field->field_offset()), "sanity check");
3068 if (fld_o != nullptr) {
3069 int slot = field->field_index();
3070 if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
3071 delete field_map;
3072 return false;
3073 }
3074 }
3075 } else {
3076 if (is_reporting_primitive_fields()) {
3077 address addr = cast_from_oop<address>(mirror_oop) + field->field_offset();
3078 int slot = field->field_index();
3079 if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
3080 delete field_map;
3081 return false;
3082 }
3083 }
3084 }
3085 }
3086 delete field_map;
3087
3088 return true;
3089 }
3090
3091 return true;
3092 }
3093
3094 // an object references a class and its instance fields
3095 // (static fields are ignored here as we report these as
3096 // references from the class).
3097 inline bool VM_HeapWalkOperation::iterate_over_object(const JvmtiHeapwalkObject& o) {
3098 // reference to the class
3099 if (!CallbackInvoker::report_class_reference(o, o.klass()->java_mirror())) {
3100 return false;
3101 }
3102
3103 // iterate over instance fields
3104 ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o.klass());
3105 for (int i=0; i<field_map->field_count(); i++) {
3106 ClassFieldDescriptor* field = field_map->field_at(i);
3107 char type = field->field_type();
3108 int slot = field->field_index();
3109 int field_offset = field->field_offset();
3110 if (o.is_flat()) {
      // the object is flat (inlined); its fields are stored without a header inside the holder
3112 field_offset += o.offset() - o.inline_klass()->payload_offset();
3113 }
3114 if (!is_primitive_field_type(type)) {
3115 if (field->is_flat()) {
3116 // check for possible nulls
3117 if (LayoutKindHelper::is_nullable_flat(field->layout_kind())) {
3118 address payload = cast_from_oop<address>(o.obj()) + field_offset;
3119 if (field->inline_klass()->is_payload_marked_as_null(payload)) {
3120 continue;
3121 }
3122 }
3123 JvmtiHeapwalkObject field_obj(o.obj(), field_offset, field->inline_klass(), field->layout_kind());
3124 if (!CallbackInvoker::report_field_reference(o, field_obj, slot)) {
3125 return false;
3126 }
3127 } else {
3128 oop fld_o = o.obj()->obj_field_access<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>(field_offset);
3129 // ignore any objects that aren't visible to profiler
3130 if (fld_o != nullptr) {
3131 assert(Universe::heap()->is_in(fld_o), "unsafe code should not have references to Klass* anymore");
3132 if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
3133 return false;
3134 }
3135 }
3136 }
3137 } else {
3138 if (is_reporting_primitive_fields()) {
3139 // primitive instance field
3140 address addr = cast_from_oop<address>(o.obj()) + field_offset;
3141 if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
3142 return false;
3143 }
3144 }
3145 }
3146 }
3147
3148 // if the object is a java.lang.String
3149 if (is_reporting_string_values() &&
3150 o.klass() == vmClasses::String_klass()) {
3151 if (!CallbackInvoker::report_string_value(o)) {
3152 return false;
3153 }
3154 }
3155 return true;
3156 }
3157
3158
3159 // Collects all simple (non-stack) roots except for threads;
3160 // threads are handled in collect_stack_roots() as an optimization.
// If a heap root callback is provided then the callback is
// invoked for each simple root.
// If an object reference callback is provided then all simple
// roots are pushed onto the visit stack so that they can be
// processed later.
3166 //
3167 inline bool VM_HeapWalkOperation::collect_simple_roots() {
3168 SimpleRootsClosure blk;
3169
3170 // JNI globals
3171 blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
3172 JNIHandles::oops_do(&blk);
3173 if (blk.stopped()) {
3174 return false;
3175 }
3176
3177 // Preloaded classes and loader from the system dictionary
3178 CLDRootsClosure cld_roots_closure;
3179 CLDToOopClosure cld_closure(&cld_roots_closure, ClassLoaderData::_claim_none);
3180 ClassLoaderDataGraph::always_strong_cld_do(&cld_closure);
3181 if (cld_roots_closure.stopped()) {
3182 return false;
3183 }
3184
3185 // threads are now handled in collect_stack_roots()
3186
3187 // Other kinds of roots maintained by HotSpot
3188 // Many of these won't be visible but others (such as instances of important
3189 // exceptions) will be visible.
3190 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3191 Universe::vm_global()->oops_do(&blk);
3192 if (blk.stopped()) {
3193 return false;
3194 }
3195
3196 return true;
3197 }
3198
3199 // Reports the thread as JVMTI_HEAP_REFERENCE_THREAD,
3200 // walks the stack of the thread, finds all references (locals
3201 // and JNI calls) and reports these as stack references.
3202 inline bool VM_HeapWalkOperation::collect_stack_refs(JavaThread* java_thread,
3203 JNILocalRootsClosure* blk)
3204 {
3205 oop threadObj = java_thread->threadObj();
3206 oop mounted_vt = java_thread->is_vthread_mounted() ? java_thread->vthread() : nullptr;
3207 if (mounted_vt != nullptr && !JvmtiEnvBase::is_vthread_alive(mounted_vt)) {
3208 mounted_vt = nullptr;
3209 }
3210 assert(threadObj != nullptr, "sanity check");
3211
3212 StackRefCollector stack_collector(tag_map(), blk, java_thread);
3213
3214 if (!java_thread->has_last_Java_frame()) {
3215 if (!stack_collector.set_thread(JVMTI_HEAP_REFERENCE_THREAD, threadObj)) {
3216 return false;
3217 }
3218 // no last java frame but there may be JNI locals
3219 blk->set_context(_tag_map->find(threadObj), java_lang_Thread::thread_id(threadObj), 0, (jmethodID)nullptr);
3220 java_thread->active_handles()->oops_do(blk);
3221 return !blk->stopped();
3222 }
3223 // vframes are resource allocated
3224 Thread* current_thread = Thread::current();
3225 ResourceMark rm(current_thread);
3226 HandleMark hm(current_thread);
3227
3228 RegisterMap reg_map(java_thread,
3229 RegisterMap::UpdateMap::include,
3230 RegisterMap::ProcessFrames::include,
3231 RegisterMap::WalkContinuation::include);
3232
3233 // first handle mounted vthread (if any)
3234 if (mounted_vt != nullptr) {
3235 frame f = java_thread->last_frame();
3236 vframe* vf = vframe::new_vframe(&f, ®_map, java_thread);
3237 // report virtual thread as JVMTI_HEAP_REFERENCE_OTHER
3238 if (!stack_collector.set_thread(JVMTI_HEAP_REFERENCE_OTHER, mounted_vt)) {
3239 return false;
3240 }
3241 // split virtual thread and carrier thread stacks by vthread entry ("enterSpecial") frame,
3242 // consider vthread entry frame as the last vthread stack frame
3243 while (vf != nullptr) {
3244 if (!stack_collector.do_frame(vf)) {
3245 return false;
3246 }
3247 if (vf->is_vthread_entry()) {
3248 break;
3249 }
3250 vf = vf->sender();
3251 }
3252 }
3253 // Platform or carrier thread.
3254 vframe* vf = JvmtiEnvBase::get_cthread_last_java_vframe(java_thread, ®_map);
3255 if (!stack_collector.set_thread(JVMTI_HEAP_REFERENCE_THREAD, threadObj)) {
3256 return false;
3257 }
3258 return stack_collector.process_frames(vf);
3259 }
3260
3261
3262 // Collects the simple roots for all threads (each thread object is
3263 // reported as a heap root) and collects all stack roots - for each
3264 // thread it walks the execution stack to find all references and local JNI refs.
3265 inline bool VM_HeapWalkOperation::collect_stack_roots() {
3266 JNILocalRootsClosure blk;
3267 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
3268 oop threadObj = thread->threadObj();
3269 if (threadObj != nullptr && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
3270 if (!collect_stack_refs(thread, &blk)) {
3271 return false;
3272 }
3273 }
3274 }
3275 return true;
3276 }
3277
3278 // Reports stack references for the unmounted virtual thread.
3279 inline bool VM_HeapWalkOperation::collect_vthread_stack_refs(oop vt) {
3280 if (!JvmtiEnvBase::is_vthread_alive(vt)) {
3281 return true;
3282 }
3283 ContinuationWrapper cont(java_lang_VirtualThread::continuation(vt));
3284 if (cont.is_empty()) {
3285 return true;
3286 }
3287 assert(!cont.is_mounted(), "sanity check");
3288
3289 stackChunkOop chunk = cont.last_nonempty_chunk();
3290 if (chunk == nullptr || chunk->is_empty()) {
3291 return true;
3292 }
3293
3294 // vframes are resource allocated
3295 Thread* current_thread = Thread::current();
3296 ResourceMark rm(current_thread);
3297 HandleMark hm(current_thread);
3298
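  // The virtual thread is unmounted, so its frames live in the frozen
  // continuation; build the RegisterMap from the continuation object
  // rather than from a JavaThread.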
3299 RegisterMap reg_map(cont.continuation(), RegisterMap::UpdateMap::include);
3300
3301 JNILocalRootsClosure blk;
3302 // JavaThread is not required for unmounted virtual threads
3303 StackRefCollector stack_collector(tag_map(), &blk, nullptr);
3304 // reference to the vthread is already reported
3305 if (!stack_collector.set_thread(vt)) {
3306 return false;
3307 }
3308
3309   frame fr = chunk->top_frame(&reg_map);
3310   vframe* vf = vframe::new_vframe(&fr, &reg_map, nullptr);
3311 return stack_collector.process_frames(vf);
3312 }
3313
3314 // Visit an object:
3315 // first, mark the object as visited;
3316 // second, get all the outbound references from this object (in other words, all
3317 // the objects referenced by this object).
3318 //
3319 bool VM_HeapWalkOperation::visit(const JvmtiHeapwalkObject& o) {
3320 // mark object as visited
3321 assert(!visit_stack()->is_visited(o), "can't visit same object more than once");
3322 visit_stack()->mark_visited(o);
3323
3324 Klass* klass = o.klass();
3325 // instance
3326 if (klass->is_instance_klass()) {
3327 if (klass == vmClasses::Class_klass()) {
3328 assert(!o.is_flat(), "Class object cannot be flattened");
3329 if (!java_lang_Class::is_primitive(o.obj())) {
3330 // a java.lang.Class
3331 return iterate_over_class(o);
3332 }
3333 } else {
3334       // We report stack references only when no initial object is specified
3335       // (in that case we start from heap roots, which include platform thread stack references).
3336 if (initial_object().is_null() && java_lang_VirtualThread::is_subclass(klass)) {
3337 assert(!o.is_flat(), "VirtualThread object cannot be flattened");
3338 if (!collect_vthread_stack_refs(o.obj())) {
3339 return false;
3340 }
3341 }
3342 return iterate_over_object(o);
3343 }
3344 }
3345
3346 // flat object array
3347 if (klass->is_flatArray_klass()) {
3348 return iterate_over_flat_array(o);
3349 }
3350
3351 // object array
3352 if (klass->is_objArray_klass()) {
3353 return iterate_over_array(o);
3354 }
3355
3356 // type array
3357 if (klass->is_typeArray_klass()) {
3358 return iterate_over_type_array(o);
3359 }
3360
3361 return true;
3362 }
3363
3364 void VM_HeapWalkOperation::doit() {
3365 ResourceMark rm;
3366 ClassFieldMapCacheMark cm;
3367
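  // Collect tags of objects that died since the last GC into _dead_objects
  // so that ObjectFree events can be posted once the walk is finished.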
3368 JvmtiTagMap::check_hashmaps_for_heapwalk(_dead_objects);
3369
3370 assert(visit_stack()->is_empty(), "visit stack must be empty");
3371
3372 // the heap walk starts with an initial object or the heap roots
3373 if (initial_object().is_null()) {
3374     // Calling collect_stack_roots() before collect_simple_roots() can result in a big
3375     // performance boost for an agent that is focused on analyzing references in the thread stacks.
3376 if (!collect_stack_roots()) return;
3377
3378 if (!collect_simple_roots()) return;
3379 } else {
3380 visit_stack()->push(initial_object()());
3381 }
3382
3383 // object references required
3384 if (is_following_references()) {
3385
3386 // visit each object until all reachable objects have been
3387 // visited or the callback asked to terminate the iteration.
3388 while (!visit_stack()->is_empty()) {
3389 const JvmtiHeapwalkObject o = visit_stack()->pop();
3390 if (!visit_stack()->is_visited(o)) {
3391 if (!visit(o)) {
3392 break;
3393 }
3394 }
3395 }
3396 }
3397 }
3398
3399 // iterate over all objects that are reachable from a set of roots
3400 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
3401 jvmtiStackReferenceCallback stack_ref_callback,
3402 jvmtiObjectReferenceCallback object_ref_callback,
3403 const void* user_data) {
3404 // VTMS transitions must be disabled before the EscapeBarrier.
3405 MountUnmountDisabler disabler;
3406
3407 JavaThread* jt = JavaThread::current();
3408 EscapeBarrier eb(true, jt);
3409 eb.deoptimize_objects_all_threads();
3410 Arena dead_object_arena(mtServiceability);
3411 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
3412
3413 {
3414 MutexLocker ml(Heap_lock);
3415 BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
3416 VM_HeapWalkOperation op(this, Handle(), context, user_data, &dead_objects);
3417 VMThread::execute(&op);
3418 }
3419 convert_flat_object_entries();
3420
3421 // Post events outside of Heap_lock
3422 post_dead_objects(&dead_objects);
3423 }
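
// Illustrative only (not part of the VM): a minimal sketch of how a JVMTI agent
// might reach the code above through the public IterateOverReachableObjects API.
// The callback below is a placeholder that simply continues the iteration; it
// assumes the agent already holds a jvmtiEnv* named jvmti with can_tag_objects enabled.
//
//   static jvmtiIterationControl JNICALL simple_root_cb(jvmtiHeapRootKind root_kind,
//                                                       jlong class_tag, jlong size,
//                                                       jlong* tag_ptr, void* user_data) {
//     return JVMTI_ITERATION_CONTINUE;  // keep iterating over objects reachable from this root
//   }
//
//   // The stack and object reference callbacks may be null if the agent only cares about roots.
//   jvmti->IterateOverReachableObjects(simple_root_cb, nullptr, nullptr, nullptr);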
3424
3425 // iterate over all objects that are reachable from a given object
3426 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
3427 jvmtiObjectReferenceCallback object_ref_callback,
3428 const void* user_data) {
3429 oop obj = JNIHandles::resolve(object);
3430 Handle initial_object(Thread::current(), obj);
3431
3432 Arena dead_object_arena(mtServiceability);
3433 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
3434
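  // Keep virtual threads from mounting or unmounting while the walk is set up and executed.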
3435 MountUnmountDisabler disabler;
3436
3437 {
3438 MutexLocker ml(Heap_lock);
3439 BasicHeapWalkContext context(nullptr, nullptr, object_ref_callback);
3440 VM_HeapWalkOperation op(this, initial_object, context, user_data, &dead_objects);
3441 VMThread::execute(&op);
3442 }
3443 convert_flat_object_entries();
3444
3445 // Post events outside of Heap_lock
3446 post_dead_objects(&dead_objects);
3447 }
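
// Illustrative only: agents typically reach the code above via the public
// IterateOverObjectsReachableFromObject API, e.g. with a placeholder callback
// (jvmti and obj are assumed to be a valid jvmtiEnv* and jobject):
//
//   static jvmtiIterationControl JNICALL ref_cb(jvmtiObjectReferenceKind reference_kind,
//                                               jlong class_tag, jlong size, jlong* tag_ptr,
//                                               jlong referrer_tag, jint referrer_index,
//                                               void* user_data) {
//     return JVMTI_ITERATION_CONTINUE;  // follow references out of each visited object
//   }
//
//   jvmti->IterateOverObjectsReachableFromObject(obj, ref_cb, nullptr);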
3448
3449 // follow references from an initial object or the GC roots
3450 void JvmtiTagMap::follow_references(jint heap_filter,
3451 Klass* klass,
3452 jobject object,
3453 const jvmtiHeapCallbacks* callbacks,
3454 const void* user_data)
3455 {
3456 // VTMS transitions must be disabled before the EscapeBarrier.
3457 MountUnmountDisabler disabler;
3458
3459 oop obj = JNIHandles::resolve(object);
3460 JavaThread* jt = JavaThread::current();
3461 Handle initial_object(jt, obj);
3462   // EA based optimizations on objects that are tagged or reachable from initial_object are already reverted.
3463 EscapeBarrier eb(initial_object.is_null() &&
3464 !(heap_filter & JVMTI_HEAP_FILTER_UNTAGGED),
3465 jt);
3466 eb.deoptimize_objects_all_threads();
3467
3468 Arena dead_object_arena(mtServiceability);
3469 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
3470
3471 {
3472 MutexLocker ml(Heap_lock);
3473 AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
3474 VM_HeapWalkOperation op(this, initial_object, context, user_data, &dead_objects);
3475 VMThread::execute(&op);
3476 }
3477 convert_flat_object_entries();
3478
3479 // Post events outside of Heap_lock
3480 post_dead_objects(&dead_objects);
3481 }
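
// Illustrative only: a minimal sketch of the modern heap-walking entry point,
// FollowReferences, which drives the AdvancedHeapWalkContext path above. The
// callback is a placeholder and jvmti is an assumed jvmtiEnv*.
//
//   static jint JNICALL heap_ref_cb(jvmtiHeapReferenceKind reference_kind,
//                                   const jvmtiHeapReferenceInfo* reference_info,
//                                   jlong class_tag, jlong referrer_class_tag, jlong size,
//                                   jlong* tag_ptr, jlong* referrer_tag_ptr, jint length,
//                                   void* user_data) {
//     return JVMTI_VISIT_OBJECTS;  // keep following references from the visited object
//   }
//
//   jvmtiHeapCallbacks callbacks = {};
//   callbacks.heap_reference_callback = heap_ref_cb;
//   // heap_filter == 0, klass == nullptr, initial_object == nullptr: start from the
//   // heap roots with no filtering.
//   jvmti->FollowReferences(0, nullptr, nullptr, &callbacks, nullptr);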
3482
3483 // Verify gc_notification follows set_needs_cleaning.
3484 DEBUG_ONLY(static bool notified_needs_cleaning = false;)
3485
3486 void JvmtiTagMap::set_needs_cleaning() {
3487 assert(SafepointSynchronize::is_at_safepoint(), "called in gc pause");
3488 assert(Thread::current()->is_VM_thread(), "should be the VM thread");
3489 // Can't assert !notified_needs_cleaning; a partial GC might be upgraded
3490 // to a full GC and do this twice without intervening gc_notification.
3491 DEBUG_ONLY(notified_needs_cleaning = true;)
3492
3493 JvmtiEnvIterator it;
3494 for (JvmtiEnv* env = it.first(); env != nullptr; env = it.next(env)) {
3495 JvmtiTagMap* tag_map = env->tag_map_acquire();
3496 if (tag_map != nullptr) {
3497 tag_map->_needs_cleaning = !tag_map->is_empty();
3498 }
3499 }
3500 }
3501
3502 void JvmtiTagMap::gc_notification(size_t num_dead_entries) {
3503 assert(notified_needs_cleaning, "missing GC notification");
3504 DEBUG_ONLY(notified_needs_cleaning = false;)
3505
3506 // Notify ServiceThread if there's work to do.
3507 {
3508 MonitorLocker ml(Service_lock, Mutex::_no_safepoint_check_flag);
3509 _has_object_free_events = (num_dead_entries != 0);
3510 if (_has_object_free_events) ml.notify_all();
3511 }
3512
3513 // If no dead entries then cancel cleaning requests.
3514 if (num_dead_entries == 0) {
3515 JvmtiEnvIterator it;
3516 for (JvmtiEnv* env = it.first(); env != nullptr; env = it.next(env)) {
3517 JvmtiTagMap* tag_map = env->tag_map_acquire();
3518 if (tag_map != nullptr) {
3519         MutexLocker ml(tag_map->lock(), Mutex::_no_safepoint_check_flag);
3520 tag_map->_needs_cleaning = false;
3521 }
3522 }
3523 }
3524 }
3525
3526 // Used by ServiceThread to discover there is work to do.
3527 bool JvmtiTagMap::has_object_free_events_and_reset() {
3528 assert_lock_strong(Service_lock);
3529 bool result = _has_object_free_events;
3530 _has_object_free_events = false;
3531 return result;
3532 }
3533
3534 // Used by ServiceThread to clean up tagmaps.
3535 void JvmtiTagMap::flush_all_object_free_events() {
3536 JavaThread* thread = JavaThread::current();
3537 JvmtiEnvIterator it;
3538 for (JvmtiEnv* env = it.first(); env != nullptr; env = it.next(env)) {
3539 JvmtiTagMap* tag_map = env->tag_map_acquire();
3540 if (tag_map != nullptr) {
3541 tag_map->flush_object_free_events();
3542 ThreadBlockInVM tbiv(thread); // Be safepoint-polite while looping.
3543 }
3544 }
3545 }