/*
 * Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "jvmtifiles/jvmtiEnv.hpp"
#include "logging/log.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/access.inline.hpp"
#include "oops/arrayOop.hpp"
#include "oops/constantPool.inline.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/klass.inline.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/typeArrayOop.inline.hpp"
#include "prims/jvmtiEventController.hpp"
#include "prims/jvmtiEventController.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiImpl.hpp"
#include "prims/jvmtiTagMap.hpp"
#include "prims/jvmtiTagMapTable.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/deoptimization.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/mutex.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/reflectionUtils.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/timerTrace.hpp"
#include "runtime/thread.inline.hpp"
#include "runtime/threadSMR.hpp"
#include "runtime/vframe.hpp"
#include "runtime/vmThread.hpp"
#include "runtime/vmOperations.hpp"
#include "utilities/objectBitSet.inline.hpp"
#include "utilities/macros.hpp"

typedef ObjectBitSet<mtServiceability> JVMTIBitSet;

bool JvmtiTagMap::_has_object_free_events = false;

// create a JvmtiTagMap
JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
  _env(env),
  _lock(Mutex::nonleaf+1, "JvmtiTagMap_lock", Mutex::_allow_vm_block_flag,
        Mutex::_safepoint_check_never),
  _needs_rehashing(false),
  _needs_cleaning(false),
  _posting_events(false) {

  assert(JvmtiThreadState_lock->is_locked(), "sanity check");
  assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");

  _hashmap = new JvmtiTagMapTable();

  // finally add us to the environment
  ((JvmtiEnvBase *)env)->release_set_tag_map(this);
}

// destroy a JvmtiTagMap
JvmtiTagMap::~JvmtiTagMap() {

  // no lock acquired as we assume the enclosing environment is
  // also being destroyed.
  ((JvmtiEnvBase *)_env)->set_tag_map(NULL);

  // finally destroy the hashmap
  delete _hashmap;
  _hashmap = NULL;
}

// Called by env_dispose() to reclaim memory before deallocation.
// Remove all the entries but keep the empty table intact.
// This needs the table lock.
void JvmtiTagMap::clear() {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
  _hashmap->clear();
}
// returns the tag map for the given environment. If the tag map
// doesn't exist then it is created.
JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
  JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map_acquire();
  if (tag_map == NULL) {
    MutexLocker mu(JvmtiThreadState_lock);
    tag_map = ((JvmtiEnvBase*)env)->tag_map();
    if (tag_map == NULL) {
      tag_map = new JvmtiTagMap(env);
    }
  } else {
    DEBUG_ONLY(JavaThread::current()->check_possible_safepoint());
  }
  return tag_map;
}

// iterate over all entries in the tag map.
void JvmtiTagMap::entry_iterate(JvmtiTagMapEntryClosure* closure) {
  hashmap()->entry_iterate(closure);
}

// returns true if the hashmaps are empty
bool JvmtiTagMap::is_empty() {
  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
  return hashmap()->is_empty();
}

// This checks for posting and rehashing before operations that
// use this tagmap table.
void JvmtiTagMap::check_hashmap(GrowableArray<jlong>* objects) {
  assert(is_locked(), "checking");

  if (is_empty()) { return; }

  if (_needs_cleaning &&
      objects != NULL &&
      env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
    remove_dead_entries_locked(objects);
  }
  if (_needs_rehashing) {
    log_info(jvmti, table)("TagMap table needs rehashing");
    hashmap()->rehash();
    _needs_rehashing = false;
  }
}

// This checks for posting and rehashing and is called from the heap walks.
void JvmtiTagMap::check_hashmaps_for_heapwalk(GrowableArray<jlong>* objects) {
  assert(SafepointSynchronize::is_at_safepoint(), "called from safepoints");

  // Verify that the tag map tables are valid and unconditionally post events
  // that are expected to be posted before gc_notification.
  JvmtiEnvIterator it;
  for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
    JvmtiTagMap* tag_map = env->tag_map_acquire();
    if (tag_map != NULL) {
      // The ZDriver may be walking the hashmaps concurrently so this lock is needed.
      MutexLocker ml(tag_map->lock(), Mutex::_no_safepoint_check_flag);
      tag_map->check_hashmap(objects);
    }
  }
}

// Return the tag value for an object, or 0 if the object is
// not tagged
//
static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
  JvmtiTagMapEntry* entry = tag_map->hashmap()->find(o);
  if (entry == NULL) {
    return 0;
  } else {
    jlong tag = entry->tag();
    assert(tag != 0, "should not be zero");
    return tag;
  }
}


// A CallbackWrapper is a support class for querying and tagging an object
// around a callback to a profiler. The constructor does pre-callback
// work to get the tag value, klass tag value, ... and the destructor
// does the post-callback work of tagging or untagging the object.
//
// {
//   CallbackWrapper wrapper(tag_map, o);
//
//   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//     checking to see if the object has been tagged, untagged, or the
//     tag value has changed.
//
class CallbackWrapper : public StackObj {
 private:
  JvmtiTagMap*      _tag_map;
  JvmtiTagMapTable* _hashmap;
  JvmtiTagMapEntry* _entry;
  oop               _o;
  jlong             _obj_size;
  jlong             _obj_tag;
  jlong             _klass_tag;

 protected:
  JvmtiTagMap* tag_map() const { return _tag_map; }

  // invoked post-callback to tag, untag, or update the tag of an object
  void inline post_callback_tag_update(oop o, JvmtiTagMapTable* hashmap,
                                       JvmtiTagMapEntry* entry, jlong obj_tag);
 public:
  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
           "MT unsafe or must be VM thread");

    // object to tag
    _o = o;

    // object size
    _obj_size = (jlong)_o->size() * wordSize;

    // record the context
    _tag_map = tag_map;
    _hashmap = tag_map->hashmap();
    _entry = _hashmap->find(_o);

    // get object tag
    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();

    // get the class and the class's tag value
    assert(vmClasses::Class_klass()->is_mirror_instance_klass(), "Is not?");

    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
  }

  ~CallbackWrapper() {
    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
  }

  inline jlong* obj_tag_p()      { return &_obj_tag; }
  inline jlong obj_size() const  { return _obj_size; }
  inline jlong obj_tag() const   { return _obj_tag; }
  inline jlong klass_tag() const { return _klass_tag; }
};


// invoked post-callback to tag, untag, or update the tag of an object
void inline CallbackWrapper::post_callback_tag_update(oop o,
                                                      JvmtiTagMapTable* hashmap,
                                                      JvmtiTagMapEntry* entry,
                                                      jlong obj_tag) {
  if (entry == NULL) {
    if (obj_tag != 0) {
      // callback has tagged the object
      assert(Thread::current()->is_VM_thread(), "must be VMThread");
      hashmap->add(o, obj_tag);
    }
  } else {
    // object was previously tagged - the callback may have untagged
    // the object or changed the tag value
    if (obj_tag == 0) {
      hashmap->remove(o);
    } else {
      if (obj_tag != entry->tag()) {
        entry->set_tag(obj_tag);
      }
    }
  }
}

// An extended CallbackWrapper used when reporting an object reference
// to the agent.
//
// {
//   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
//
//   (*callback)(wrapper.klass_tag(),
//               wrapper.obj_size(),
//               wrapper.obj_tag_p(),
//               wrapper.referrer_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//     checking to see if the referrer object has been tagged, untagged,
//     or the tag value has changed.
//
class TwoOopCallbackWrapper : public CallbackWrapper {
 private:
  bool              _is_reference_to_self;
  JvmtiTagMapTable* _referrer_hashmap;
  JvmtiTagMapEntry* _referrer_entry;
  oop               _referrer;
  jlong             _referrer_obj_tag;
  jlong             _referrer_klass_tag;
  jlong*            _referrer_tag_p;

  bool is_reference_to_self() const { return _is_reference_to_self; }

 public:
  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
    CallbackWrapper(tag_map, o)
  {
    // self reference needs to be handled in a special way
    _is_reference_to_self = (referrer == o);

    if (_is_reference_to_self) {
      _referrer_klass_tag = klass_tag();
      _referrer_tag_p = obj_tag_p();
    } else {
      _referrer = referrer;
      // record the context
      _referrer_hashmap = tag_map->hashmap();
      _referrer_entry = _referrer_hashmap->find(_referrer);

      // get object tag
      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
      _referrer_tag_p = &_referrer_obj_tag;

      // get referrer class tag.
      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
    }
  }

  ~TwoOopCallbackWrapper() {
    if (!is_reference_to_self()) {
      post_callback_tag_update(_referrer,
                               _referrer_hashmap,
                               _referrer_entry,
                               _referrer_obj_tag);
    }
  }

  // address of referrer tag
  // (for a self reference this will return the same thing as obj_tag_p())
  inline jlong* referrer_tag_p() { return _referrer_tag_p; }

  // referrer's class tag
  inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
};

// tag an object
//
// This function is performance critical. If many threads attempt to tag objects
// around the same time then it's possible that the Mutex associated with the
// tag map will be a hot lock.
void JvmtiTagMap::set_tag(jobject object, jlong tag) {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);

  // SetTag should not post events because the JavaThread has to
  // transition to native for the callback and this cannot stop for
  // safepoints with the hashmap lock held.
  check_hashmap(NULL); /* don't collect dead objects */

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  // see if the object is already tagged
  JvmtiTagMapTable* hashmap = _hashmap;
  JvmtiTagMapEntry* entry = hashmap->find(o);

  // if the object is not already tagged then we tag it
  if (entry == NULL) {
    if (tag != 0) {
      hashmap->add(o, tag);
    } else {
      // no-op
    }
  } else {
    // if the object is already tagged then we either update
    // the tag (if a new tag value has been provided)
    // or remove the object if the new tag value is 0.
    if (tag == 0) {
      hashmap->remove(o);
    } else {
      entry->set_tag(tag);
    }
  }
}

// get the tag for an object
jlong JvmtiTagMap::get_tag(jobject object) {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);

  // GetTag should not post events because the JavaThread has to
  // transition to native for the callback and this cannot stop for
  // safepoints with the hashmap lock held.
  check_hashmap(NULL); /* don't collect dead objects */

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  return tag_for(this, o);
}
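
// Illustrative agent-side usage of the tagging entry points above (a sketch,
// not part of the VM; assumes a valid jvmtiEnv* jvmti and a jobject obj):
//
//   jlong tag = 0;
//   jvmti->SetTag(obj, 42);      // lands in JvmtiTagMap::set_tag()
//   jvmti->GetTag(obj, &tag);    // lands in JvmtiTagMap::get_tag(); tag == 42
//   jvmti->SetTag(obj, 0);       // untags: the entry is removed from the hashmap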


// Helper class used to describe the static or instance fields of a class.
// For each field it holds the field index (as defined by the JVMTI specification),
// the field type, and the offset.

class ClassFieldDescriptor: public CHeapObj<mtInternal> {
 private:
  int  _field_index;
  int  _field_offset;
  char _field_type;
 public:
  ClassFieldDescriptor(int index, char type, int offset) :
    _field_index(index), _field_offset(offset), _field_type(type) {
  }
  int field_index()  const { return _field_index; }
  char field_type()  const { return _field_type; }
  int field_offset() const { return _field_offset; }
};

class ClassFieldMap: public CHeapObj<mtInternal> {
 private:
  enum {
    initial_field_count = 5
  };

  // list of field descriptors
  GrowableArray<ClassFieldDescriptor*>* _fields;

  // constructor
  ClassFieldMap();

  // add a field
  void add(int index, char type, int offset);

 public:
  ~ClassFieldMap();

  // access
  int field_count()                     { return _fields->length(); }
  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }

  // functions to create maps of static or instance fields
  static ClassFieldMap* create_map_of_static_fields(Klass* k);
  static ClassFieldMap* create_map_of_instance_fields(oop obj);
};

ClassFieldMap::ClassFieldMap() {
  _fields = new (ResourceObj::C_HEAP, mtServiceability)
    GrowableArray<ClassFieldDescriptor*>(initial_field_count, mtServiceability);
}

ClassFieldMap::~ClassFieldMap() {
  for (int i=0; i<_fields->length(); i++) {
    delete _fields->at(i);
  }
  delete _fields;
}

void ClassFieldMap::add(int index, char type, int offset) {
  ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
  _fields->append(field);
}

// Returns a heap allocated ClassFieldMap to describe the static fields
// of the given class.
//
ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
  InstanceKlass* ik = InstanceKlass::cast(k);

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  FilteredFieldStream f(ik, false, false);
  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, true, true); !fld.eos(); fld.next(), index++) {
    // ignore instance fields
    if (!fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }
  return field_map;
}

// Returns a heap allocated ClassFieldMap to describe the instance fields
// of the given class. All instance fields are included (this means public
// and private fields declared in superclasses and superinterfaces too).
//
ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
  InstanceKlass* ik = InstanceKlass::cast(obj->klass());

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  FilteredFieldStream f(ik, false, false);

  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, false, false); !fld.eos(); fld.next(), index++) {
    // ignore static fields
    if (fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }

  return field_map;
}

// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
// heap iteration to avoid creating a field map for each object in the heap
// (only need to create the map when the first instance of a class is encountered).
//
class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
 private:
  enum {
    initial_class_count = 200
  };
  ClassFieldMap* _field_map;

  ClassFieldMap* field_map() const { return _field_map; }

  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
  ~JvmtiCachedClassFieldMap();

  static GrowableArray<InstanceKlass*>* _class_list;
  static void add_to_class_list(InstanceKlass* ik);

 public:
  // returns the field map for a given object (returning map cached
  // by InstanceKlass if possible)
  static ClassFieldMap* get_map_of_instance_fields(oop obj);

  // removes the field map from all instanceKlasses - should be
  // called before VM operation completes
  static void clear_cache();

  // returns the number of ClassFieldMaps cached by instanceKlasses
  static int cached_field_map_count();
};

GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;

JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
  _field_map = field_map;
}

JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
  if (_field_map != NULL) {
    delete _field_map;
  }
}

// Marker class to ensure that the class file map cache is only used in a defined
// scope.
class ClassFieldMapCacheMark : public StackObj {
 private:
  static bool _is_active;
 public:
  ClassFieldMapCacheMark() {
    assert(Thread::current()->is_VM_thread(), "must be VMThread");
    assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
    assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
    _is_active = true;
  }
  ~ClassFieldMapCacheMark() {
    JvmtiCachedClassFieldMap::clear_cache();
    _is_active = false;
  }
  static bool is_active() { return _is_active; }
};

bool ClassFieldMapCacheMark::_is_active;


// record that the given InstanceKlass is caching a field map
void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
  if (_class_list == NULL) {
    _class_list = new (ResourceObj::C_HEAP, mtServiceability)
      GrowableArray<InstanceKlass*>(initial_class_count, mtServiceability);
  }
  _class_list->push(ik);
}

// returns the instance field map for the given object
// (returns field map cached by the InstanceKlass if possible)
ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");

  Klass* k = obj->klass();
  InstanceKlass* ik = InstanceKlass::cast(k);

  // return cached map if possible
  JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
  if (cached_map != NULL) {
    assert(cached_map->field_map() != NULL, "missing field list");
    return cached_map->field_map();
  } else {
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
    cached_map = new JvmtiCachedClassFieldMap(field_map);
    ik->set_jvmti_cached_class_field_map(cached_map);
    add_to_class_list(ik);
    return field_map;
  }
}

// remove the field maps cached by all instanceKlasses
void JvmtiCachedClassFieldMap::clear_cache() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  if (_class_list != NULL) {
    for (int i = 0; i < _class_list->length(); i++) {
      InstanceKlass* ik = _class_list->at(i);
      JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
      assert(cached_map != NULL, "should not be NULL");
      ik->set_jvmti_cached_class_field_map(NULL);
      delete cached_map; // deletes the encapsulated field map
    }
    delete _class_list;
    _class_list = NULL;
  }
}

// returns the number of ClassFieldMaps cached by instanceKlasses
int JvmtiCachedClassFieldMap::cached_field_map_count() {
  return (_class_list == NULL) ? 0 : _class_list->length();
}

// helper function to indicate if an object is filtered by its tag or class tag
static inline bool is_filtered_by_heap_filter(jlong obj_tag,
                                              jlong klass_tag,
                                              int heap_filter) {
  // apply the heap filter
  if (obj_tag != 0) {
    // filter out tagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
  } else {
    // filter out untagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
  }
  if (klass_tag != 0) {
    // filter out objects with tagged classes
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
  } else {
    // filter out objects with untagged classes.
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
  }
  return false;
}
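
// For example, an agent that only wants callbacks for objects that are tagged
// AND whose class is tagged would pass (an illustrative value, not from this file):
//
//   jint heap_filter = JVMTI_HEAP_FILTER_UNTAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED;
//
// i.e. the filter bits name what is filtered OUT, not what is kept.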

// helper function to indicate if an object is filtered by a klass filter
static inline bool is_filtered_by_klass_filter(oop obj, Klass* klass_filter) {
  if (klass_filter != NULL) {
    if (obj->klass() != klass_filter) {
      return true;
    }
  }
  return false;
}

// helper function to tell if a field is a primitive field or not
static inline bool is_primitive_field_type(char type) {
  return (type != JVM_SIGNATURE_CLASS && type != JVM_SIGNATURE_ARRAY);
}

// helper function to copy the value from location addr to jvalue.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
    default: ShouldNotReachHere();
  }
}
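
// For example, reading a jint field at 'offset' within an instance 'obj'
// (a sketch of how the callers further below use copy_to_jvalue):
//
//   jvalue v;
//   address addr = cast_from_oop<address>(obj) + offset;
//   copy_to_jvalue(&v, addr, JVMTI_PRIMITIVE_TYPE_INT);  // v.i now holds the field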

// helper function to invoke string primitive value callback
// returns visit control flags
static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
                                         CallbackWrapper* wrapper,
                                         oop str,
                                         void* user_data)
{
  assert(str->klass() == vmClasses::String_klass(), "not a string");

  typeArrayOop s_value = java_lang_String::value(str);

  // JDK-6584008: the value field may be null if a String instance is
  // partially constructed.
  if (s_value == NULL) {
    return 0;
  }
  // get the string value and length
  // (string value may be offset from the base)
  int s_len = java_lang_String::length(str);
  bool is_latin1 = java_lang_String::is_latin1(str);
  jchar* value;
  if (s_len > 0) {
    if (!is_latin1) {
      value = s_value->char_at_addr(0);
    } else {
      // Inflate latin1 encoded string to UTF16
      jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
      for (int i = 0; i < s_len; i++) {
        buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
      }
      value = &buf[0];
    }
  } else {
    // Don't use char_at_addr(0) if length is 0
    value = (jchar*) s_value->base(T_CHAR);
  }

  // invoke the callback
  jint res = (*cb)(wrapper->klass_tag(),
                   wrapper->obj_size(),
                   wrapper->obj_tag_p(),
                   value,
                   (jint)s_len,
                   user_data);

  if (is_latin1 && s_len > 0) {
    FREE_C_HEAP_ARRAY(jchar, value);
  }
  return res;
}
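
// An agent's matching string callback has this shape (a sketch; registration
// via jvmtiHeapCallbacks.string_primitive_value_callback is assumed):
//
//   static jint JNICALL my_string_cb(jlong class_tag, jlong size, jlong* tag_ptr,
//                                    const jchar* value, jint value_length,
//                                    void* user_data) {
//     // 'value' is always UTF-16 here; latin1 strings were inflated above
//     return JVMTI_VISIT_OBJECTS;
//   }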

// helper function to invoke array primitive value callback
// returns visit control flags
static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
                                                  CallbackWrapper* wrapper,
                                                  oop obj,
                                                  void* user_data)
{
  assert(obj->is_typeArray(), "not a primitive array");

  // get base address of first element
  typeArrayOop array = typeArrayOop(obj);
  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
  void* elements = array->base(type);

  // jvmtiPrimitiveType is defined so this mapping is always correct
  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);

  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               (jint)array->length(),
               elem_type,
               elements,
               user_data);
}

// helper function to invoke the primitive field callback for all static fields
// of a given class
static jint invoke_primitive_field_callback_for_static_fields
  (CallbackWrapper* wrapper,
   oop obj,
   jvmtiPrimitiveFieldCallback cb,
   void* user_data)
{
  // for static fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(obj->klass() == vmClasses::Class_klass(), "not a class");
  if (java_lang_Class::is_primitive(obj)) {
    return 0;
  }
  Klass* klass = java_lang_Class::as_Klass(obj);

  // ignore classes for object and type arrays
  if (!klass->is_instance_klass()) {
    return 0;
  }

  // ignore classes which aren't linked yet
  InstanceKlass* ik = InstanceKlass::cast(klass);
  if (!ik->is_linked()) {
    return 0;
  }

  // get the field map
  ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);

  // invoke the callback for each static primitive field
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value
    int offset = field->field_offset();
    address addr = cast_from_oop<address>(klass->java_mirror()) + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      delete field_map;
      return res;
    }
  }

  delete field_map;
  return 0;
}

// helper function to invoke the primitive field callback for all instance fields
// of a given object
static jint invoke_primitive_field_callback_for_instance_fields(
  CallbackWrapper* wrapper,
  oop obj,
  jvmtiPrimitiveFieldCallback cb,
  void* user_data)
{
  // for instance fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  // get the map of the instance fields
  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);

  // invoke the callback for each instance primitive field
  for (int i=0; i<fields->field_count(); i++) {
    ClassFieldDescriptor* field = fields->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value
    int offset = field->field_offset();
    address addr = cast_from_oop<address>(obj) + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      return res;
    }
  }
  return 0;
}
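
// The corresponding agent-side callback shape (a sketch; registration via
// jvmtiHeapCallbacks.primitive_field_callback is assumed):
//
//   static jint JNICALL my_prim_field_cb(jvmtiHeapReferenceKind kind,
//                                        const jvmtiHeapReferenceInfo* info,
//                                        jlong object_class_tag, jlong* object_tag_ptr,
//                                        jvalue value, jvmtiPrimitiveType value_type,
//                                        void* user_data) {
//     // kind is JVMTI_HEAP_REFERENCE_FIELD or JVMTI_HEAP_REFERENCE_STATIC_FIELD;
//     // info->field.index is the JVMTI field index set above
//     return JVMTI_VISIT_OBJECTS;
//   }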


// VM operation to iterate over all objects in the heap (both reachable
// and unreachable)
class VM_HeapIterateOperation: public VM_Operation {
 private:
  ObjectClosure* _blk;
  GrowableArray<jlong>* const _dead_objects;
 public:
  VM_HeapIterateOperation(ObjectClosure* blk, GrowableArray<jlong>* objects) :
    _blk(blk), _dead_objects(objects) { }

  VMOp_Type type() const { return VMOp_HeapIterateOperation; }
  void doit() {
    // allows class field maps to be cached during iteration
    ClassFieldMapCacheMark cm;

    JvmtiTagMap::check_hashmaps_for_heapwalk(_dead_objects);

    // make sure that heap is parsable (fills TLABs with filler objects)
    Universe::heap()->ensure_parsability(false); // no need to retire TLABs

    // Verify heap before iteration - if the heap gets corrupted then
    // JVMTI's IterateOverHeap will crash.
    if (VerifyBeforeIteration) {
      Universe::verify();
    }

    // do the iteration
    Universe::heap()->object_iterate(_blk);
  }

};


// An ObjectClosure used to support the deprecated IterateOverHeap and
// IterateOverInstancesOfClass functions
class IterateOverHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  Klass* _klass;
  jvmtiHeapObjectFilter _object_filter;
  jvmtiHeapObjectCallback _heap_object_callback;
  const void* _user_data;

  // accessors
  JvmtiTagMap* tag_map() const                    { return _tag_map; }
  jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  Klass* klass() const                            { return _klass; }
  const void* user_data() const                   { return _user_data; }

  // indicates if iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const               { return _iteration_aborted; }
  void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }

 public:
  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
                               Klass* klass,
                               jvmtiHeapObjectFilter object_filter,
                               jvmtiHeapObjectCallback heap_object_callback,
                               const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _object_filter(object_filter),
    _heap_object_callback(heap_object_callback),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};

// invoked for each object in the heap
void IterateOverHeapObjectClosure::do_object(oop o) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // instanceof check when filtering by klass
  if (klass() != NULL && !o->is_a(klass())) {
    return;
  }

  // skip if object is a dormant shared object whose mirror hasn't been loaded
  if (o != NULL && o->klass()->java_mirror() == NULL) {
    log_debug(cds, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(o),
                         o->klass()->external_name());
    return;
  }

  // prepare for the callback
  CallbackWrapper wrapper(tag_map(), o);

  // if the object is tagged and we're only interested in untagged objects
  // then don't invoke the callback. Similarly, if the object is untagged
  // and we're only interested in tagged objects we skip the callback.
  if (wrapper.obj_tag() != 0) {
    if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
  } else {
    if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
  }

  // invoke the agent's callback
  jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
                                                       wrapper.obj_size(),
                                                       wrapper.obj_tag_p(),
                                                       (void*)user_data());
  if (control == JVMTI_ITERATION_ABORT) {
    set_iteration_aborted(true);
  }
}

// An ObjectClosure used to support the IterateThroughHeap function
class IterateThroughHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  Klass* _klass;
  int _heap_filter;
  const jvmtiHeapCallbacks* _callbacks;
  const void* _user_data;

  // accessor functions
  JvmtiTagMap* tag_map() const                { return _tag_map; }
  int heap_filter() const                     { return _heap_filter; }
  const jvmtiHeapCallbacks* callbacks() const { return _callbacks; }
  Klass* klass() const                        { return _klass; }
  const void* user_data() const               { return _user_data; }

  // indicates if the iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const           { return _iteration_aborted; }

  // used to check the visit control flags. If the abort flag is set
  // then we set the iteration aborted flag so that the iteration completes
  // without processing any further objects
  bool check_flags_for_abort(jint flags) {
    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
    if (is_abort) {
      _iteration_aborted = true;
    }
    return is_abort;
  }

 public:
  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
                                  Klass* klass,
                                  int heap_filter,
                                  const jvmtiHeapCallbacks* heap_callbacks,
                                  const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _heap_filter(heap_filter),
    _callbacks(heap_callbacks),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};

// invoked for each object in the heap
void IterateThroughHeapObjectClosure::do_object(oop obj) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // apply class filter
  if (is_filtered_by_klass_filter(obj, klass())) return;

  // skip if object is a dormant shared object whose mirror hasn't been loaded
  if (obj != NULL && obj->klass()->java_mirror() == NULL) {
    log_debug(cds, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(obj),
                         obj->klass()->external_name());
    return;
  }

  // prepare for callback
  CallbackWrapper wrapper(tag_map(), obj);

  // check if filtered by the heap filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
    return;
  }

  // for arrays we need the length, otherwise -1
  bool is_array = obj->is_array();
  int len = is_array ? arrayOop(obj)->length() : -1;

  // invoke the object callback (if callback is provided)
  if (callbacks()->heap_iteration_callback != NULL) {
    jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
    jint res = (*cb)(wrapper.klass_tag(),
                     wrapper.obj_size(),
                     wrapper.obj_tag_p(),
                     (jint)len,
                     (void*)user_data());
    if (check_flags_for_abort(res)) return;
  }

  // for objects and classes we report primitive fields if callback provided
  if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
    jint res;
    jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
    if (obj->klass() == vmClasses::Class_klass()) {
      res = invoke_primitive_field_callback_for_static_fields(&wrapper,
                                                              obj,
                                                              cb,
                                                              (void*)user_data());
    } else {
      res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
                                                                obj,
                                                                cb,
                                                                (void*)user_data());
    }
    if (check_flags_for_abort(res)) return;
  }

  // string callback
  if (!is_array &&
      callbacks()->string_primitive_value_callback != NULL &&
      obj->klass() == vmClasses::String_klass()) {
    jint res = invoke_string_value_callback(
                callbacks()->string_primitive_value_callback,
                &wrapper,
                obj,
                (void*)user_data() );
    if (check_flags_for_abort(res)) return;
  }

  // array callback
  if (is_array &&
      callbacks()->array_primitive_value_callback != NULL &&
      obj->is_typeArray()) {
    jint res = invoke_array_primitive_value_callback(
               callbacks()->array_primitive_value_callback,
               &wrapper,
               obj,
               (void*)user_data() );
    if (check_flags_for_abort(res)) return;
  }
}


// Deprecated function to iterate over all objects in the heap
void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
                                    Klass* klass,
                                    jvmtiHeapObjectCallback heap_object_callback,
                                    const void* user_data)
{
  // EA based optimizations on tagged objects are already reverted.
  EscapeBarrier eb(object_filter == JVMTI_HEAP_OBJECT_UNTAGGED ||
                   object_filter == JVMTI_HEAP_OBJECT_EITHER,
                   JavaThread::current());
  eb.deoptimize_objects_all_threads();
  Arena dead_object_arena(mtServiceability);
  GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
  {
    MutexLocker ml(Heap_lock);
    IterateOverHeapObjectClosure blk(this,
                                     klass,
                                     object_filter,
                                     heap_object_callback,
                                     user_data);
    VM_HeapIterateOperation op(&blk, &dead_objects);
    VMThread::execute(&op);
  }
  // Post events outside of Heap_lock
  post_dead_objects(&dead_objects);
}
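
// Illustrative agent call that reaches iterate_over_heap() (a sketch; assumes
// a valid jvmtiEnv* jvmti):
//
//   static jvmtiIterationControl JNICALL my_obj_cb(jlong class_tag, jlong size,
//                                                  jlong* tag_ptr, void* user_data) {
//     *tag_ptr = 1;                      // tag every visited object
//     return JVMTI_ITERATION_CONTINUE;
//   }
//   ...
//   jvmti->IterateOverHeap(JVMTI_HEAP_OBJECT_EITHER, &my_obj_cb, NULL);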


// Iterates over all objects in the heap
void JvmtiTagMap::iterate_through_heap(jint heap_filter,
                                       Klass* klass,
                                       const jvmtiHeapCallbacks* callbacks,
                                       const void* user_data)
{
  // EA based optimizations on tagged objects are already reverted.
  EscapeBarrier eb(!(heap_filter & JVMTI_HEAP_FILTER_UNTAGGED), JavaThread::current());
  eb.deoptimize_objects_all_threads();

  Arena dead_object_arena(mtServiceability);
  GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
  {
    MutexLocker ml(Heap_lock);
    IterateThroughHeapObjectClosure blk(this,
                                        klass,
                                        heap_filter,
                                        callbacks,
                                        user_data);
    VM_HeapIterateOperation op(&blk, &dead_objects);
    VMThread::execute(&op);
  }
  // Post events outside of Heap_lock
  post_dead_objects(&dead_objects);
}
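
// Illustrative agent call that reaches iterate_through_heap() (a sketch):
//
//   static jint JNICALL my_iter_cb(jlong class_tag, jlong size, jlong* tag_ptr,
//                                  jint length, void* user_data) {
//     return JVMTI_VISIT_OBJECTS;        // or JVMTI_VISIT_ABORT to stop
//   }
//   ...
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = &my_iter_cb;
//   jvmti->IterateThroughHeap(0 /* no filter */, NULL /* all classes */,
//                             &callbacks, NULL);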

void JvmtiTagMap::remove_dead_entries_locked(GrowableArray<jlong>* objects) {
  assert(is_locked(), "precondition");
  if (_needs_cleaning) {
    // Recheck whether to post object free events under the lock.
    if (!env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
      objects = NULL;
    }
    log_info(jvmti, table)("TagMap table needs cleaning%s",
                           ((objects != NULL) ? " and posting" : ""));
    hashmap()->remove_dead_entries(objects);
    _needs_cleaning = false;
  }
}

void JvmtiTagMap::remove_dead_entries(GrowableArray<jlong>* objects) {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
  remove_dead_entries_locked(objects);
}

void JvmtiTagMap::post_dead_objects(GrowableArray<jlong>* const objects) {
  assert(Thread::current()->is_Java_thread(), "Must post from JavaThread");
  if (objects != NULL && objects->length() > 0) {
    JvmtiExport::post_object_free(env(), objects);
    log_info(jvmti)("%d free objects posted", objects->length());
  }
}

void JvmtiTagMap::remove_and_post_dead_objects() {
  ResourceMark rm;
  GrowableArray<jlong> objects;
  remove_dead_entries(&objects);
  post_dead_objects(&objects);
}

void JvmtiTagMap::flush_object_free_events() {
  assert_not_at_safepoint();
  if (env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
    {
      MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
      // If another thread is posting events, let it finish
      while (_posting_events) {
        ml.wait();
      }

      if (!_needs_cleaning || is_empty()) {
        _needs_cleaning = false;
        return;
      }
      _posting_events = true;
    } // Drop the lock so we can do the cleaning on the VM thread.
    // Needs both cleaning and event posting (up to some other thread
    // getting there first after we dropped the lock).
    remove_and_post_dead_objects();
    {
      MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
      _posting_events = false;
      ml.notify_all();
    }
  } else {
    remove_dead_entries(NULL);
  }
}

// support class for get_objects_with_tags

class TagObjectCollector : public JvmtiTagMapEntryClosure {
 private:
  JvmtiEnv* _env;
  JavaThread* _thread;
  jlong* _tags;
  jint _tag_count;
  bool _some_dead_found;

  GrowableArray<jobject>* _object_results;  // collected objects (JNI weak refs)
  GrowableArray<uint64_t>* _tag_results;    // collected tags

 public:
  TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) :
    _env(env),
    _thread(JavaThread::current()),
    _tags((jlong*)tags),
    _tag_count(tag_count),
    _some_dead_found(false),
    _object_results(new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<jobject>(1, mtServiceability)),
    _tag_results(new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<uint64_t>(1, mtServiceability)) { }

  ~TagObjectCollector() {
    delete _object_results;
    delete _tag_results;
  }

  bool some_dead_found() const { return _some_dead_found; }

  // for each tagged object check if the tag value matches
  // - if it matches then we create a JNI local reference to the object
  // and record the reference and tag value.
  //
  void do_entry(JvmtiTagMapEntry* entry) {
    for (int i=0; i<_tag_count; i++) {
      if (_tags[i] == entry->tag()) {
        // The reference in this tag map could be the only (implicitly weak)
        // reference to that object. If we hand it out, we need to keep it live wrt
        // SATB marking similar to other j.l.ref.Reference referents. This is
        // achieved by using a phantom load in the object() accessor.
        oop o = entry->object();
        if (o == NULL) {
          _some_dead_found = true;
          // skip this whole entry
          return;
        }
        assert(o != NULL && Universe::heap()->is_in(o), "sanity check");
        jobject ref = JNIHandles::make_local(_thread, o);
        _object_results->append(ref);
        _tag_results->append((uint64_t)entry->tag());
      }
    }
  }

  // return the results from the collection
  //
  jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
    jvmtiError error;
    int count = _object_results->length();
    assert(count >= 0, "sanity check");

    // if object_result_ptr is not NULL then allocate the result and copy
    // in the object references.
    if (object_result_ptr != NULL) {
      error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
      if (error != JVMTI_ERROR_NONE) {
        return error;
      }
      for (int i=0; i<count; i++) {
        (*object_result_ptr)[i] = _object_results->at(i);
      }
    }

    // if tag_result_ptr is not NULL then allocate the result and copy
    // in the tag values.
    if (tag_result_ptr != NULL) {
      error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
      if (error != JVMTI_ERROR_NONE) {
        if (object_result_ptr != NULL) {
          // free the object result array allocated above
          _env->Deallocate((unsigned char*)*object_result_ptr);
        }
        return error;
      }
      for (int i=0; i<count; i++) {
        (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
      }
    }

    *count_ptr = count;
    return JVMTI_ERROR_NONE;
  }
};

// return the list of objects with the specified tags
jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
  jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {

  TagObjectCollector collector(env(), tags, count);
  {
    // iterate over all tagged objects
    MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
    // Can't post ObjectFree events here from a JavaThread, so this
    // will race with the gc_notification thread in the tiny
    // window where the object is not marked but hasn't been notified that
    // it is collected yet.
    entry_iterate(&collector);
  }
  return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
}
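
// Illustrative agent call that reaches get_objects_with_tags() (a sketch):
//
//   jlong tags[] = { 42 };
//   jint count = 0;
//   jobject* objs = NULL;
//   jlong* found_tags = NULL;
//   jvmti->GetObjectsWithTags(1, tags, &count, &objs, &found_tags);
//   // ... use objs[0..count) ...
//   jvmti->Deallocate((unsigned char*)objs);
//   jvmti->Deallocate((unsigned char*)found_tags);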

// helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
// (not performance critical as only used for roots)
static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
  switch (kind) {
    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
    case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
    case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
    case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
    case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
    default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
  }
}

// Base class for all heap walk contexts. The base class maintains a flag
// to indicate if the context is valid or not.
class HeapWalkContext {
 private:
  bool _valid;
 public:
  HeapWalkContext(bool valid) { _valid = valid; }
  void invalidate()           { _valid = false; }
  bool is_valid() const       { return _valid; }
};

// A basic heap walk context for the deprecated heap walking functions.
// The context for a basic heap walk are the callbacks and fields used by
// the referrer caching scheme.
class BasicHeapWalkContext: public HeapWalkContext {
 private:
  jvmtiHeapRootCallback _heap_root_callback;
  jvmtiStackReferenceCallback _stack_ref_callback;
  jvmtiObjectReferenceCallback _object_ref_callback;

  // used for caching
  oop _last_referrer;
  jlong _last_referrer_tag;

 public:
  BasicHeapWalkContext() : HeapWalkContext(false) { }

  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
                       jvmtiStackReferenceCallback stack_ref_callback,
                       jvmtiObjectReferenceCallback object_ref_callback) :
    HeapWalkContext(true),
    _heap_root_callback(heap_root_callback),
    _stack_ref_callback(stack_ref_callback),
    _object_ref_callback(object_ref_callback),
    _last_referrer(NULL),
    _last_referrer_tag(0) {
  }

  // accessors
  jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
  jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }

  oop last_referrer() const               { return _last_referrer; }
  void set_last_referrer(oop referrer)    { _last_referrer = referrer; }
  jlong last_referrer_tag() const         { return _last_referrer_tag; }
  void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
};

// The advanced heap walk context for the FollowReferences functions.
// The context is the callbacks, and the fields used for filtering.
class AdvancedHeapWalkContext: public HeapWalkContext {
 private:
  jint _heap_filter;
  Klass* _klass_filter;
  const jvmtiHeapCallbacks* _heap_callbacks;

 public:
  AdvancedHeapWalkContext() : HeapWalkContext(false) { }

  AdvancedHeapWalkContext(jint heap_filter,
                          Klass* klass_filter,
                          const jvmtiHeapCallbacks* heap_callbacks) :
    HeapWalkContext(true),
    _heap_filter(heap_filter),
    _klass_filter(klass_filter),
    _heap_callbacks(heap_callbacks) {
  }

  // accessors
  jint heap_filter() const    { return _heap_filter; }
  Klass* klass_filter() const { return _klass_filter; }

  const jvmtiHeapReferenceCallback heap_reference_callback() const {
    return _heap_callbacks->heap_reference_callback;
  }
  const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
    return _heap_callbacks->primitive_field_callback;
  }
  const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
    return _heap_callbacks->array_primitive_value_callback;
  }
  const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
    return _heap_callbacks->string_primitive_value_callback;
  }
};

// The CallbackInvoker is a class with static functions that the heap walk can call
// into to invoke callbacks. It works in one of two modes. The "basic" mode is
// used for the deprecated IterateOverReachableObjects functions. The "advanced"
// mode is for the newer FollowReferences function which supports a lot of
// additional callbacks.
class CallbackInvoker : AllStatic {
 private:
  // heap walk styles
  enum { basic, advanced };
  static int _heap_walk_type;
  static bool is_basic_heap_walk()    { return _heap_walk_type == basic; }
  static bool is_advanced_heap_walk() { return _heap_walk_type == advanced; }

  // context for basic style heap walk
  static BasicHeapWalkContext _basic_context;
  static BasicHeapWalkContext* basic_context() {
    assert(_basic_context.is_valid(), "invalid");
    return &_basic_context;
  }

  // context for advanced style heap walk
  static AdvancedHeapWalkContext _advanced_context;
  static AdvancedHeapWalkContext* advanced_context() {
    assert(_advanced_context.is_valid(), "invalid");
    return &_advanced_context;
  }

  // context needed for all heap walks
  static JvmtiTagMap* _tag_map;
  static const void* _user_data;
  static GrowableArray<oop>* _visit_stack;
  static JVMTIBitSet* _bitset;

  // accessors
  static JvmtiTagMap* tag_map()            { return _tag_map; }
  static const void* user_data()           { return _user_data; }
  static GrowableArray<oop>* visit_stack() { return _visit_stack; }

  // if the object hasn't been visited then push it onto the visit stack
  // so that it will be visited later
  static inline bool check_for_visit(oop obj) {
    if (!_bitset->is_marked(obj)) visit_stack()->push(obj);
    return true;
  }

  // invoke basic style callbacks
  static inline bool invoke_basic_heap_root_callback
    (jvmtiHeapRootKind root_kind, oop obj);
  static inline bool invoke_basic_stack_ref_callback
    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
     int slot, oop obj);
  static inline bool invoke_basic_object_reference_callback
    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // invoke advanced style callbacks
  static inline bool invoke_advanced_heap_root_callback
    (jvmtiHeapReferenceKind ref_kind, oop obj);
  static inline bool invoke_advanced_stack_ref_callback
    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
     jmethodID method, jlocation bci, jint slot, oop obj);
  static inline bool invoke_advanced_object_reference_callback
    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // used to report the value of primitive fields
  static inline bool report_primitive_field
    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);

 public:
  // initialize for basic mode
  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                             GrowableArray<oop>* visit_stack,
                                             const void* user_data,
                                             BasicHeapWalkContext context,
                                             JVMTIBitSet* bitset);

  // initialize for advanced mode
  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                GrowableArray<oop>* visit_stack,
                                                const void* user_data,
                                                AdvancedHeapWalkContext context,
                                                JVMTIBitSet* bitset);

  // functions to report roots
  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
                                           jmethodID m, oop o);
  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
                                           jmethodID method, jlocation bci, jint slot, oop o);

  // functions to report references
  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  static inline bool report_class_reference(oop referrer, oop referree);
  static inline bool report_class_loader_reference(oop referrer, oop referree);
  static inline bool report_signers_reference(oop referrer, oop referree);
  static inline bool report_protection_domain_reference(oop referrer, oop referree);
  static inline bool report_superclass_reference(oop referrer, oop referree);
  static inline bool report_interface_reference(oop referrer, oop referree);
  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  static inline bool report_primitive_array_values(oop array);
  static inline bool report_string_value(oop str);
  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
};

// statics
int CallbackInvoker::_heap_walk_type;
BasicHeapWalkContext CallbackInvoker::_basic_context;
AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;
JVMTIBitSet* CallbackInvoker::_bitset;

// initialize for basic heap walk (IterateOverReachableObjects et al)
void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                                     GrowableArray<oop>* visit_stack,
                                                     const void* user_data,
                                                     BasicHeapWalkContext context,
                                                     JVMTIBitSet* bitset) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _basic_context = context;
  _advanced_context.invalidate(); // will trigger assertion if used
  _heap_walk_type = basic;
  _bitset = bitset;
}

// initialize for advanced heap walk (FollowReferences)
void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                        GrowableArray<oop>* visit_stack,
                                                        const void* user_data,
                                                        AdvancedHeapWalkContext context,
                                                        JVMTIBitSet* bitset) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _advanced_context = context;
  _basic_context.invalidate(); // will trigger assertion if used
  _heap_walk_type = advanced;
  _bitset = bitset;
}


// invoke basic style heap root callback
inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
  // if heap roots should be reported
  jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}
1627
1628 // invoke basic style stack ref callback
1629 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
1630 jlong thread_tag,
1631 jint depth,
1632 jmethodID method,
1633 int slot,
1634 oop obj) {
1635 // check if stack refs should be reported
1636 jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
1637 if (cb == NULL) {
1638 return check_for_visit(obj);
1639 }
1640
1641 CallbackWrapper wrapper(tag_map(), obj);
1642 jvmtiIterationControl control = (*cb)(root_kind,
1643 wrapper.klass_tag(),
1644 wrapper.obj_size(),
1645 wrapper.obj_tag_p(),
1646 thread_tag,
1647 depth,
1648 method,
1649 slot,
1650 (void*)user_data());
1651 // push root to visit stack when following references
1652 if (control == JVMTI_ITERATION_CONTINUE &&
1653 basic_context()->object_ref_callback() != NULL) {
1654 visit_stack()->push(obj);
1655 }
1656 return control != JVMTI_ITERATION_ABORT;
1657 }
1658
1659 // invoke basic style object reference callback
1660 inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
1661 oop referrer,
1662 oop referree,
1663 jint index) {
1664
1665 BasicHeapWalkContext* context = basic_context();
1666
1667 // callback requires the referrer's tag. If it's the same referrer
1668 // as the last call then we use the cached value.
1669 jlong referrer_tag;
1670 if (referrer == context->last_referrer()) {
1671 referrer_tag = context->last_referrer_tag();
1672 } else {
1673 referrer_tag = tag_for(tag_map(), referrer);
1674 }
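// (the cache is effective because all references from a given referrer,
// e.g. the elements of a single array, are reported consecutively)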
1675
1676 // do the callback
1677 CallbackWrapper wrapper(tag_map(), referree);
1678 jvmtiObjectReferenceCallback cb = context->object_ref_callback();
1679 jvmtiIterationControl control = (*cb)(ref_kind,
1680 wrapper.klass_tag(),
1681 wrapper.obj_size(),
1682 wrapper.obj_tag_p(),
1683 referrer_tag,
1684 index,
1685 (void*)user_data());
1686
1687 // record referrer and referrer tag. For self-references record the
1688 // tag value from the callback as this might differ from referrer_tag.
1689 context->set_last_referrer(referrer);
1690 if (referrer == referree) {
1691 context->set_last_referrer_tag(*wrapper.obj_tag_p());
1692 } else {
1693 context->set_last_referrer_tag(referrer_tag);
1694 }
1695
1696 if (control == JVMTI_ITERATION_CONTINUE) {
1697 return check_for_visit(referree);
1698 } else {
1699 return control != JVMTI_ITERATION_ABORT;
1700 }
1701 }
1702
1703 // invoke advanced style heap root callback
1704 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
1705 oop obj) {
1706 AdvancedHeapWalkContext* context = advanced_context();
1707
1708 // check that callback is provided
1709 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
1710 if (cb == NULL) {
1711 return check_for_visit(obj);
1712 }
1713
1714 // apply class filter
1715 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1716 return check_for_visit(obj);
1717 }
1718
1719 // setup the callback wrapper
1720 CallbackWrapper wrapper(tag_map(), obj);
1721
1722 // apply tag filter
1723 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1724 wrapper.klass_tag(),
1725 context->heap_filter())) {
1726 return check_for_visit(obj);
1727 }
1728
1729 // for arrays we need the length, otherwise -1
1730 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
1731
1732 // invoke the callback
1733 jint res = (*cb)(ref_kind,
1734 NULL, // referrer info
1735 wrapper.klass_tag(),
1736 0, // referrer_class_tag is 0 for heap root
1737 wrapper.obj_size(),
1738 wrapper.obj_tag_p(),
1739 NULL, // referrer_tag_p
1740 len,
1741 (void*)user_data());
1742 if (res & JVMTI_VISIT_ABORT) {
1743 return false;
1744 }
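// JVMTI_VISIT_OBJECTS asks the walker to also follow the object's
// references, so queue the object; the result of check_for_visit() can
// be ignored here because abort was already handled above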
1745 if (res & JVMTI_VISIT_OBJECTS) {
1746 check_for_visit(obj);
1747 }
1748 return true;
1749 }
1750
1751 // report a reference from a thread stack to an object
1752 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
1753 jlong thread_tag,
1754 jlong tid,
1755 int depth,
1756 jmethodID method,
1757 jlocation bci,
1758 jint slot,
1759 oop obj) {
1760 AdvancedHeapWalkContext* context = advanced_context();
1761
1762 // check that callback is provided
1763 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
1764 if (cb == NULL) {
1765 return check_for_visit(obj);
1766 }
1767
1768 // apply class filter
1769 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1770 return check_for_visit(obj);
1771 }
1772
1773 // setup the callback wrapper
1774 CallbackWrapper wrapper(tag_map(), obj);
1775
1776 // apply tag filter
1777 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1778 wrapper.klass_tag(),
1779 context->heap_filter())) {
1780 return check_for_visit(obj);
1781 }
1782
1783 // setup the referrer info
1784 jvmtiHeapReferenceInfo reference_info;
1785 reference_info.stack_local.thread_tag = thread_tag;
1786 reference_info.stack_local.thread_id = tid;
1787 reference_info.stack_local.depth = depth;
1788 reference_info.stack_local.method = method;
1789 reference_info.stack_local.location = bci;
1790 reference_info.stack_local.slot = slot;
1791
1792 // for arrays we need the length, otherwise -1
1793 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
1794
1795 // call into the agent
1796 int res = (*cb)(ref_kind,
1797 &reference_info,
1798 wrapper.klass_tag(),
1799 0, // referrer_class_tag is 0 for heap root (stack)
1800 wrapper.obj_size(),
1801 wrapper.obj_tag_p(),
1802 NULL, // referrer_tag_p is NULL for root
1803 len,
1804 (void*)user_data());
1805
1806 if (res & JVMTI_VISIT_ABORT) {
1807 return false;
1808 }
1809 if (res & JVMTI_VISIT_OBJECTS) {
1810 check_for_visit(obj);
1811 }
1812 return true;
1813 }
1814
1815 // This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
1816 // only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
1817 #define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD) \
1818 | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD) \
1819 | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
1820 | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
1821 | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL) \
1822 | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
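// For example, JVMTI_HEAP_REFERENCE_FIELD is in the mask, so the test
// (REF_INFO_MASK & (1 << ref_kind)) passes a populated
// jvmtiHeapReferenceInfo for field references, while kinds not listed
// above (such as JVMTI_HEAP_REFERENCE_CLASS) are reported with a NULL
// reference_info.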
1823
1824 // invoke the object reference callback to report a reference
1825 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
1826 oop referrer,
1827 oop obj,
1828 jint index)
1829 {
1830 // field index is only valid field in reference_info
1831 static jvmtiHeapReferenceInfo reference_info = { 0 };
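// (static and zero-initialized: only field.index is written below, so
// all other fields are reported as zero)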
1832
1833 AdvancedHeapWalkContext* context = advanced_context();
1834
1835 // check that callback is provided
1836 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
1837 if (cb == NULL) {
1838 return check_for_visit(obj);
1839 }
1840
1841 // apply class filter
1842 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1843 return check_for_visit(obj);
1844 }
1845
1846 // setup the callback wrapper
1847 TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
1848
1849 // apply tag filter
1850 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1851 wrapper.klass_tag(),
1852 context->heap_filter())) {
1853 return check_for_visit(obj);
1854 }
1855
1856 // field index is only valid field in reference_info
1857 reference_info.field.index = index;
1858
1859 // for arrays we need the length, otherwise -1
1860 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
1861
1862 // invoke the callback
1863 int res = (*cb)(ref_kind,
1864 (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
1865 wrapper.klass_tag(),
1866 wrapper.referrer_klass_tag(),
1867 wrapper.obj_size(),
1868 wrapper.obj_tag_p(),
1869 wrapper.referrer_tag_p(),
1870 len,
1871 (void*)user_data());
1872
1873 if (res & JVMTI_VISIT_ABORT) {
1874 return false;
1875 }
1876 if (res & JVMTI_VISIT_OBJECTS) {
1877 check_for_visit(obj);
1878 }
1879 return true;
1880 }
1881
1882 // report a "simple root"
1883 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
1884 assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
1885 kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
1886
1887 if (is_basic_heap_walk()) {
1888 // map to old style root kind
1889 jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
1890 return invoke_basic_heap_root_callback(root_kind, obj);
1891 } else {
1892 assert(is_advanced_heap_walk(), "wrong heap walk type");
1893 return invoke_advanced_heap_root_callback(kind, obj);
1894 }
1895 }
1896
1897
1898 // invoke the array primitive value callback
1899 inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
1900 assert(obj->is_typeArray(), "not a primitive array");
1901
1902 AdvancedHeapWalkContext* context = advanced_context();
1903 assert(context->array_primitive_value_callback() != NULL, "no callback");
1904
1905 // apply class filter
1906 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1907 return true;
1908 }
1909
1910 CallbackWrapper wrapper(tag_map(), obj);
1911
1912 // apply tag filter
1913 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1914 wrapper.klass_tag(),
1915 context->heap_filter())) {
1916 return true;
1917 }
1918
1919 // invoke the callback
1920 int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
1921 &wrapper,
1922 obj,
1923 (void*)user_data());
1924 return (!(res & JVMTI_VISIT_ABORT));
1925 }
1926
1927 // invoke the string value callback
1928 inline bool CallbackInvoker::report_string_value(oop str) {
1929 assert(str->klass() == vmClasses::String_klass(), "not a string");
1930
1931 AdvancedHeapWalkContext* context = advanced_context();
1932 assert(context->string_primitive_value_callback() != NULL, "no callback");
1933
1934 // apply class filter
1935 if (is_filtered_by_klass_filter(str, context->klass_filter())) {
1936 return true;
1937 }
1938
1939 CallbackWrapper wrapper(tag_map(), str);
1940
1941 // apply tag filter
1942 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1943 wrapper.klass_tag(),
1944 context->heap_filter())) {
1945 return true;
1946 }
1947
1948 // invoke the callback
1949 int res = invoke_string_value_callback(context->string_primitive_value_callback(),
1950 &wrapper,
1951 str,
1952 (void*)user_data());
1953 return (!(res & JVMTI_VISIT_ABORT));
1954 }
1955
1956 // invoke the primitive field callback
1957 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
1958 oop obj,
1959 jint index,
1960 address addr,
1961 char type)
1962 {
1963 // for primitive fields only the index will be set
1964 static jvmtiHeapReferenceInfo reference_info = { 0 };
1965
1966 AdvancedHeapWalkContext* context = advanced_context();
1967 assert(context->primitive_field_callback() != NULL, "no callback");
1968
1969 // apply class filter
1970 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1971 return true;
1972 }
1973
1974 CallbackWrapper wrapper(tag_map(), obj);
1975
1976 // apply tag filter
1977 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1978 wrapper.klass_tag(),
1979 context->heap_filter())) {
1980 return true;
1981 }
1982
1983 // the field index in the referrer
1984 reference_info.field.index = index;
1985
1986 // map the type
1987 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1988
1989 // setup the jvalue
1990 jvalue value;
1991 copy_to_jvalue(&value, addr, value_type);
1992
1993 jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
1994 int res = (*cb)(ref_kind,
1995 &reference_info,
1996 wrapper.klass_tag(),
1997 wrapper.obj_tag_p(),
1998 value,
1999 value_type,
2000 (void*)user_data());
2001 return (!(res & JVMTI_VISIT_ABORT));
2002 }
2003
2004
2005 // instance field
2006 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
2007 jint index,
2008 address value,
2009 char type) {
2010 return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2011 obj,
2012 index,
2013 value,
2014 type);
2015 }
2016
2017 // static field
2018 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
2019 jint index,
2020 address value,
2021 char type) {
2022 return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2023 obj,
2024 index,
2025 value,
2026 type);
2027 }
2028
2029 // report a JNI local (root object) to the profiler
2030 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
2031 if (is_basic_heap_walk()) {
2032 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
2033 thread_tag,
2034 depth,
2035 m,
2036 -1,
2037 obj);
2038 } else {
2039 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
2040 thread_tag, tid,
2041 depth,
2042 m,
2043 (jlocation)-1,
2044 -1,
2045 obj);
2046 }
2047 }
2048
2049
2050 // report a local (stack reference, root object)
2051 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2052 jlong tid,
2053 jint depth,
2054 jmethodID method,
2055 jlocation bci,
2056 jint slot,
2057 oop obj) {
2058 if (is_basic_heap_walk()) {
2059 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2060 thread_tag,
2061 depth,
2062 method,
2063 slot,
2064 obj);
2065 } else {
2066 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2067 thread_tag,
2068 tid,
2069 depth,
2070 method,
2071 bci,
2072 slot,
2073 obj);
2074 }
2075 }
2076
2077 // report an object referencing a class.
2078 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
2079 if (is_basic_heap_walk()) {
2080 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2081 } else {
2082 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2083 }
2084 }
2085
2086 // report a class referencing its class loader.
2087 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
2088 if (is_basic_heap_walk()) {
2089 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2090 } else {
2091 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2092 }
2093 }
2094
2095 // report a class referencing its signers.
2096 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2097 if (is_basic_heap_walk()) {
2098 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2099 } else {
2100 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2101 }
2102 }
2103
2104 // report a class referencing its protection domain.
2105 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2106 if (is_basic_heap_walk()) {
2107 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2108 } else {
2109 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2110 }
2111 }
2112
2113 // report a class referencing its superclass.
2114 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2115 if (is_basic_heap_walk()) {
2116 // the basic (IterateOverReachableObjects) API has no superclass reference kind, so send JVMTI_REFERENCE_CLASS to be consistent with the past implementation
2117 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2118 } else {
2119 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2120 }
2121 }
2122
2123 // report a class referencing one of its interfaces.
2124 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2125 if (is_basic_heap_walk()) {
2126 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2127 } else {
2128 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2129 }
2130 }
2131
2132 // report a class referencing one of its static fields.
2133 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2134 if (is_basic_heap_walk()) {
2135 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2136 } else {
2137 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2138 }
2139 }
2140
2141 // report an array referencing an element object
2142 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2143 if (is_basic_heap_walk()) {
2144 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2145 } else {
2146 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2147 }
2148 }
2149
2150 // report an object referencing an instance field object
2151 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2152 if (is_basic_heap_walk()) {
2153 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2154 } else {
2155 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2156 }
2157 }
2158
2159 // report a class referencing a constant pool entry
2160 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2161 if (is_basic_heap_walk()) {
2162 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2163 } else {
2164 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2165 }
2166 }
2167
2168 // A supporting closure used to process simple roots
2169 class SimpleRootsClosure : public OopClosure {
2170 private:
2171 jvmtiHeapReferenceKind _kind;
2172 bool _continue;
2173
2174 jvmtiHeapReferenceKind root_kind() { return _kind; }
2175
2176 public:
2177 void set_kind(jvmtiHeapReferenceKind kind) {
2178 _kind = kind;
2179 _continue = true;
2180 }
2181
2182 inline bool stopped() {
2183 return !_continue;
2184 }
2185
2186 void do_oop(oop* obj_p) {
2187 // iteration has terminated
2188 if (stopped()) {
2189 return;
2190 }
2191
2192 oop o = NativeAccess<AS_NO_KEEPALIVE>::oop_load(obj_p);
2193 // ignore null
2194 if (o == NULL) {
2195 return;
2196 }
2197
2198 assert(Universe::heap()->is_in(o), "should be impossible");
2199
2200 jvmtiHeapReferenceKind kind = root_kind();
2201
2202 // invoke the callback
2203 _continue = CallbackInvoker::report_simple_root(kind, o);
2204
2205 }
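// roots are native (off-heap) oop locations and are never compressed,
// so the narrowOop overload should never be called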
2206 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2207 };
2208
2209 // A supporting closure used to process JNI locals
2210 class JNILocalRootsClosure : public OopClosure {
2211 private:
2212 jlong _thread_tag;
2213 jlong _tid;
2214 jint _depth;
2215 jmethodID _method;
2216 bool _continue;
2217 public:
2218 void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
2219 _thread_tag = thread_tag;
2220 _tid = tid;
2221 _depth = depth;
2222 _method = method;
2223 _continue = true;
2224 }
2225
2226 inline bool stopped() {
2227 return !_continue;
2228 }
2229
2230 void do_oop(oop* obj_p) {
2231 // iteration has terminated
2232 if (stopped()) {
2233 return;
2234 }
2235
2236 oop o = *obj_p;
2237 // ignore null
2238 if (o == NULL) {
2239 return;
2240 }
2241
2242 // invoke the callback
2243 _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
2244 }
2245 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2246 };
2247
2248
2249 // A VM operation to iterate over objects that are reachable from
2250 // a set of roots or an initial object.
2251 //
2252 // For VM_HeapWalkOperation the set of roots used is :-
2253 //
2254 // - All JNI global references
2255 // - All inflated monitors
2256 // - All classes loaded by the boot class loader (or all classes
2257 // in the event that class unloading is disabled)
2258 // - All java threads
2259 // - For each java thread then all locals and JNI local references
2260 // on the thread's execution stack
2261 // - All visible/explainable objects from Universe::oops_do
2262 //
2263 class VM_HeapWalkOperation: public VM_Operation {
2264 private:
2265 enum {
2266 initial_visit_stack_size = 4000
2267 };
2268
2269 bool _is_advanced_heap_walk; // indicates FollowReferences
2270 JvmtiTagMap* _tag_map;
2271 Handle _initial_object;
2272 GrowableArray<oop>* _visit_stack; // the visit stack
2273
2274 JVMTIBitSet _bitset;
2275
2276 // Dead object tags in JvmtiTagMap
2277 GrowableArray<jlong>* _dead_objects;
2278
2279 bool _following_object_refs; // are we following object references
2280
2281 bool _reporting_primitive_fields; // optional reporting
2282 bool _reporting_primitive_array_values;
2283 bool _reporting_string_values;
2284
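// The visit stack is C-heap allocated (mtServiceability): it is created
// before the VM operation executes, used by the VM thread during the walk,
// and freed in the destructor, so its lifetime must be independent of any
// thread's resource area.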
2285 GrowableArray<oop>* create_visit_stack() {
2286 return new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<oop>(initial_visit_stack_size, mtServiceability);
2287 }
2288
2289 // accessors
2290 bool is_advanced_heap_walk() const { return _is_advanced_heap_walk; }
2291 JvmtiTagMap* tag_map() const { return _tag_map; }
2292 Handle initial_object() const { return _initial_object; }
2293
2294 bool is_following_references() const { return _following_object_refs; }
2295
2296 bool is_reporting_primitive_fields() const { return _reporting_primitive_fields; }
2297 bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
2298 bool is_reporting_string_values() const { return _reporting_string_values; }
2299
2300 GrowableArray<oop>* visit_stack() const { return _visit_stack; }
2301
2302 // iterate over the various object types
2303 inline bool iterate_over_array(oop o);
2304 inline bool iterate_over_type_array(oop o);
2305 inline bool iterate_over_class(oop o);
2306 inline bool iterate_over_object(oop o);
2307
2308 // root collection
2309 inline bool collect_simple_roots();
2310 inline bool collect_stack_roots();
2311 inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);
2312
2313 // visit an object
2314 inline bool visit(oop o);
2315
2316 public:
2317 VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2318 Handle initial_object,
2319 BasicHeapWalkContext callbacks,
2320 const void* user_data,
2321 GrowableArray<jlong>* objects);
2322
2323 VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2324 Handle initial_object,
2325 AdvancedHeapWalkContext callbacks,
2326 const void* user_data,
2327 GrowableArray<jlong>* objects);
2328
2329 ~VM_HeapWalkOperation();
2330
2331 VMOp_Type type() const { return VMOp_HeapWalkOperation; }
2332 void doit();
2333 };
2334
2335
2336 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2337 Handle initial_object,
2338 BasicHeapWalkContext callbacks,
2339 const void* user_data,
2340 GrowableArray<jlong>* objects) {
2341 _is_advanced_heap_walk = false;
2342 _tag_map = tag_map;
2343 _initial_object = initial_object;
2344 _following_object_refs = (callbacks.object_ref_callback() != NULL);
2345 _reporting_primitive_fields = false;
2346 _reporting_primitive_array_values = false;
2347 _reporting_string_values = false;
2348 _visit_stack = create_visit_stack();
2349 _dead_objects = objects;
2350
2351
2352 CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks, &_bitset);
2353 }
2354
2355 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2356 Handle initial_object,
2357 AdvancedHeapWalkContext callbacks,
2358 const void* user_data,
2359 GrowableArray<jlong>* objects) {
2360 _is_advanced_heap_walk = true;
2361 _tag_map = tag_map;
2362 _initial_object = initial_object;
2363 _following_object_refs = true;
2364 _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);
2365 _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);
2366 _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);
2367 _visit_stack = create_visit_stack();
2368 _dead_objects = objects;
2369
2370 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks, &_bitset);
2371 }
2372
2373 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2374 if (_following_object_refs) {
2375 assert(_visit_stack != NULL, "checking");
2376 delete _visit_stack;
2377 _visit_stack = NULL;
2378 }
2379 }
2380
2381 // an array references its class and has a reference to
2382 // each element in the array
2383 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
2384 objArrayOop array = objArrayOop(o);
2385
2386 // array reference to its class
2387 oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
2388 if (!CallbackInvoker::report_class_reference(o, mirror)) {
2389 return false;
2390 }
2391
2392 // iterate over the array and report each reference to a
2393 // non-null element
2394 for (int index=0; index<array->length(); index++) {
2395 oop elem = array->obj_at(index);
2396 if (elem == NULL) {
2397 continue;
2398 }
2399
2400 // report the array reference o[index] = elem
2401 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2402 return false;
2403 }
2404 }
2405 return true;
2406 }
2407
2408 // a type array references its class
2409 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
2410 Klass* k = o->klass();
2411 oop mirror = k->java_mirror();
2412 if (!CallbackInvoker::report_class_reference(o, mirror)) {
2413 return false;
2414 }
2415
2416 // report the array contents if required
2417 if (is_reporting_primitive_array_values()) {
2418 if (!CallbackInvoker::report_primitive_array_values(o)) {
2419 return false;
2420 }
2421 }
2422 return true;
2423 }
2424
2425 #ifdef ASSERT
2426 // verify that a static oop field is in range
2427 static inline bool verify_static_oop(InstanceKlass* ik,
2428 oop mirror, int offset) {
2429 address obj_p = cast_from_oop<address>(mirror) + offset;
2430 address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
2431 address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
2432 assert(end >= start, "sanity check");
2433
2434 if (obj_p >= start && obj_p < end) {
2435 return true;
2436 } else {
2437 return false;
2438 }
2439 }
2440 #endif // #ifdef ASSERT
2441
2442 // a class references its super class, interfaces, class loader, ...
2443 // and finally its static fields
2444 inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
2445 int i;
2446 Klass* klass = java_lang_Class::as_Klass(java_class);
2447
2448 if (klass->is_instance_klass()) {
2449 InstanceKlass* ik = InstanceKlass::cast(klass);
2450
2451 // Ignore the class if it hasn't been linked yet
2452 if (!ik->is_linked()) {
2453 return true;
2454 }
2455
2456 // get the java mirror
2457 oop mirror = klass->java_mirror();
2458
2459 // super (only if something more interesting than java.lang.Object)
2460 InstanceKlass* java_super = ik->java_super();
2461 if (java_super != NULL && java_super != vmClasses::Object_klass()) {
2462 oop super = java_super->java_mirror();
2463 if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
2464 return false;
2465 }
2466 }
2467
2468 // class loader
2469 oop cl = ik->class_loader();
2470 if (cl != NULL) {
2471 if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
2472 return false;
2473 }
2474 }
2475
2476 // protection domain
2477 oop pd = ik->protection_domain();
2478 if (pd != NULL) {
2479 if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
2480 return false;
2481 }
2482 }
2483
2484 // signers
2485 oop signers = ik->signers();
2486 if (signers != NULL) {
2487 if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
2488 return false;
2489 }
2490 }
2491
2492 // references from the constant pool
2493 {
2494 ConstantPool* pool = ik->constants();
2495 for (int i = 1; i < pool->length(); i++) {
2496 constantTag tag = pool->tag_at(i);
2497 if (tag.is_string() || tag.is_klass() || tag.is_unresolved_klass()) {
2498 oop entry;
2499 if (tag.is_string()) {
2500 entry = pool->resolved_string_at(i);
2501 // If the entry is non-null it is resolved.
2502 if (entry == NULL) {
2503 continue;
2504 }
2505 } else if (tag.is_klass()) {
2506 entry = pool->resolved_klass_at(i)->java_mirror();
2507 } else {
2508 // Code generated by JIT compilers might not resolve constant
2509 // pool entries. Treat them as resolved if they are loaded.
2510 assert(tag.is_unresolved_klass(), "must be");
2511 constantPoolHandle cp(Thread::current(), pool);
2512 Klass* klass = ConstantPool::klass_at_if_loaded(cp, i);
2513 if (klass == NULL) {
2514 continue;
2515 }
2516 entry = klass->java_mirror();
2517 }
2518 if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
2519 return false;
2520 }
2521 }
2522 }
2523 }
2524
2525 // interfaces
2526 // (These will already have been reported as references from the constant pool
2527 // but are specified by IterateOverReachableObjects and must be reported).
2528 Array<InstanceKlass*>* interfaces = ik->local_interfaces();
2529 for (i = 0; i < interfaces->length(); i++) {
2530 oop interf = interfaces->at(i)->java_mirror();
2531 if (interf == NULL) {
2532 continue;
2533 }
2534 if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
2535 return false;
2536 }
2537 }
2538
2539 // iterate over the static fields
2540
2541 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
2542 for (i=0; i<field_map->field_count(); i++) {
2543 ClassFieldDescriptor* field = field_map->field_at(i);
2544 char type = field->field_type();
2545 if (!is_primitive_field_type(type)) {
2546 oop fld_o = mirror->obj_field(field->field_offset());
2547 assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
2548 if (fld_o != NULL) {
2549 int slot = field->field_index();
2550 if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
2551 delete field_map;
2552 return false;
2553 }
2554 }
2555 } else {
2556 if (is_reporting_primitive_fields()) {
2557 address addr = cast_from_oop<address>(mirror) + field->field_offset();
2558 int slot = field->field_index();
2559 if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
2560 delete field_map;
2561 return false;
2562 }
2563 }
2564 }
2565 }
2566 delete field_map;
2567
2568 return true;
2569 }
2570
2571 return true;
2572 }
2573
2574 // an object references a class and its instance fields
2575 // (static fields are ignored here as we report these as
2576 // references from the class).
2577 inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
2578 // reference to the class
2579 if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
2580 return false;
2581 }
2582
2583 // iterate over instance fields
2584 ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
2585 for (int i=0; i<field_map->field_count(); i++) {
2586 ClassFieldDescriptor* field = field_map->field_at(i);
2587 char type = field->field_type();
2588 if (!is_primitive_field_type(type)) {
2589 oop fld_o = o->obj_field_access<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>(field->field_offset());
2590 // ignore any objects that aren't visible to profiler
2591 if (fld_o != NULL) {
2592 assert(Universe::heap()->is_in(fld_o), "unsafe code should not "
2593 "have references to Klass* anymore");
2594 int slot = field->field_index();
2595 if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
2596 return false;
2597 }
2598 }
2599 } else {
2600 if (is_reporting_primitive_fields()) {
2601 // primitive instance field
2602 address addr = cast_from_oop<address>(o) + field->field_offset();
2603 int slot = field->field_index();
2604 if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
2605 return false;
2606 }
2607 }
2608 }
2609 }
2610
2611 // if the object is a java.lang.String
2612 if (is_reporting_string_values() &&
2613 o->klass() == vmClasses::String_klass()) {
2614 if (!CallbackInvoker::report_string_value(o)) {
2615 return false;
2616 }
2617 }
2618 return true;
2619 }
2620
2621
2622 // Collects all simple (non-stack) roots except for threads;
2623 // threads are handled in collect_stack_roots() as an optimization.
2624 // If there's a heap root callback provided then the callback is
2625 // invoked for each simple root.
2626 // If an object reference callback is provided then all simple
2627 // roots are pushed onto the marking stack so that they can be
2628 // processed later.
2629 //
2630 inline bool VM_HeapWalkOperation::collect_simple_roots() {
2631 SimpleRootsClosure blk;
2632
2633 // JNI globals
2634 blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
2635 JNIHandles::oops_do(&blk);
2636 if (blk.stopped()) {
2637 return false;
2638 }
2639
2640 // Preloaded classes and loader from the system dictionary
2641 blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
2642 CLDToOopClosure cld_closure(&blk, false);
2643 ClassLoaderDataGraph::always_strong_cld_do(&cld_closure);
2644 if (blk.stopped()) {
2645 return false;
2646 }
2647
2648 // threads are now handled in collect_stack_roots()
2649
2650 // Other kinds of roots maintained by HotSpot
2651 // Many of these won't be visible but others (such as instances of important
2652 // exceptions) will be visible.
2653 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
2654 Universe::vm_global()->oops_do(&blk);
2655 if (blk.stopped()) {
2656 return false;
2657 }
2658
2659 return true;
2660 }
2661
2662 // Walk the stack of a given thread and find all references (locals
2663 // and JNI calls) and report these as stack references
2664 inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
2665 JNILocalRootsClosure* blk)
2666 {
2667 oop threadObj = java_thread->threadObj();
2668 assert(threadObj != NULL, "sanity check");
2669
2670 // only need to get the thread's tag once per thread
2671 jlong thread_tag = tag_for(_tag_map, threadObj);
2672
2673 // also need the thread id
2674 jlong tid = java_lang_Thread::thread_id(threadObj);
2675
2676
2677 if (java_thread->has_last_Java_frame()) {
2678
2679 // vframes are resource allocated
2680 Thread* current_thread = Thread::current();
2681 ResourceMark rm(current_thread);
2682 HandleMark hm(current_thread);
2683
2684 RegisterMap reg_map(java_thread);
2685 frame f = java_thread->last_frame();
2686 vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);
2687
2688 bool is_top_frame = true;
2689 int depth = 0;
2690 frame* last_entry_frame = NULL;
2691
2692 while (vf != NULL) {
2693 if (vf->is_java_frame()) {
2694
2695 // java frame (interpreted, compiled, ...)
2696 javaVFrame *jvf = javaVFrame::cast(vf);
2697
2698 // the jmethodID
2699 jmethodID method = jvf->method()->jmethod_id();
2700
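// locals and operand stack slots are only available for non-native
// frames; for native frames only the JNI locals are reported (below)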
2701 if (!(jvf->method()->is_native())) {
2702 jlocation bci = (jlocation)jvf->bci();
2703 StackValueCollection* locals = jvf->locals();
2704 for (int slot=0; slot<locals->size(); slot++) {
2705 if (locals->at(slot)->type() == T_OBJECT) {
2706 oop o = locals->obj_at(slot)();
2707 if (o == NULL) {
2708 continue;
2709 }
2710
2711 // stack reference
2712 if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
2713 bci, slot, o)) {
2714 return false;
2715 }
2716 }
2717 }
2718
2719 StackValueCollection* exprs = jvf->expressions();
2720 for (int index=0; index < exprs->size(); index++) {
2721 if (exprs->at(index)->type() == T_OBJECT) {
2722 oop o = exprs->obj_at(index)();
2723 if (o == NULL) {
2724 continue;
2725 }
2726
2727 // stack reference
2728 if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
2729 bci, locals->size() + index, o)) {
2730 return false;
2731 }
2732 }
2733 }
2734
2735 // Follow oops from compiled nmethod
2736 if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) {
2737 blk->set_context(thread_tag, tid, depth, method);
2738 jvf->cb()->as_nmethod()->oops_do(blk);
2739 }
2740 } else {
2741 blk->set_context(thread_tag, tid, depth, method);
2742 if (is_top_frame) {
2743 // JNI locals for the top frame.
2744 java_thread->active_handles()->oops_do(blk);
2745 } else {
2746 if (last_entry_frame != NULL) {
2747 // JNI locals for the entry frame
2748 assert(last_entry_frame->is_entry_frame(), "checking");
2749 last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
2750 }
2751 }
2752 }
2753 last_entry_frame = NULL;
2754 depth++;
2755 } else {
2756 // externalVFrame - for an entry frame we report the JNI locals
2757 // when we find the corresponding javaVFrame
2758 frame* fr = vf->frame_pointer();
2759 assert(fr != NULL, "sanity check");
2760 if (fr->is_entry_frame()) {
2761 last_entry_frame = fr;
2762 }
2763 }
2764
2765 vf = vf->sender();
2766 is_top_frame = false;
2767 }
2768 } else {
2769 // no last java frame but there may be JNI locals
2770 blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
2771 java_thread->active_handles()->oops_do(blk);
2772 }
2773 return true;
2774 }
2775
2776
2777 // Collects the simple roots for all threads and collects all
2778 // stack roots - for each thread it walks the execution
2779 // stack to find all references and local JNI refs.
2780 inline bool VM_HeapWalkOperation::collect_stack_roots() {
2781 JNILocalRootsClosure blk;
2782 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
2783 oop threadObj = thread->threadObj();
2784 if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
2785 // Collect the simple root for this thread before we
2786 // collect its stack roots
2787 if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
2788 threadObj)) {
2789 return false;
2790 }
2791 if (!collect_stack_roots(thread, &blk)) {
2792 return false;
2793 }
2794 }
2795 }
2796 return true;
2797 }
2798
2799 // visit an object
2800 // first mark the object as visited
2801 // second get all the outbound references from this object (in other words, all
2802 // the objects referenced by this object).
2803 //
2804 bool VM_HeapWalkOperation::visit(oop o) {
2805 // mark object as visited
2806 assert(!_bitset.is_marked(o), "can't visit same object more than once");
2807 _bitset.mark_obj(o);
2808
2809 // instance
2810 if (o->is_instance()) {
2811 if (o->klass() == vmClasses::Class_klass()) {
2812 if (!java_lang_Class::is_primitive(o)) {
2813 // a java.lang.Class
2814 return iterate_over_class(o);
2815 }
2816 } else {
2817 return iterate_over_object(o);
2818 }
2819 }
2820
2821 // object array
2822 if (o->is_objArray()) {
2823 return iterate_over_array(o);
2824 }
2825
2826 // type array
2827 if (o->is_typeArray()) {
2828 return iterate_over_type_array(o);
2829 }
2830
2831 return true;
2832 }
2833
2834 void VM_HeapWalkOperation::doit() {
2835 ResourceMark rm;
2836 ClassFieldMapCacheMark cm;
2837
2838 JvmtiTagMap::check_hashmaps_for_heapwalk(_dead_objects);
2839
2840 assert(visit_stack()->is_empty(), "visit stack must be empty");
2841
2842 // the heap walk starts with an initial object or the heap roots
2843 if (initial_object().is_null()) {
2844 // calling collect_stack_roots() before collect_simple_roots() can result in a big
2845 // performance boost for an agent that is focused on analyzing references in the thread stacks.
2846 if (!collect_stack_roots()) return;
2847
2848 if (!collect_simple_roots()) return;
2849 } else {
2850 visit_stack()->push(initial_object()());
2851 }
2852
2853 // object references required
2854 if (is_following_references()) {
2855
2856 // visit each object until all reachable objects have been
2857 // visited or the callback asked to terminate the iteration.
2858 while (!visit_stack()->is_empty()) {
2859 oop o = visit_stack()->pop();
2860 if (!_bitset.is_marked(o)) {
2861 if (!visit(o)) {
2862 break;
2863 }
2864 }
2865 }
2866 }
2867 }
2868
2869 // iterate over all objects that are reachable from a set of roots
2870 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
2871 jvmtiStackReferenceCallback stack_ref_callback,
2872 jvmtiObjectReferenceCallback object_ref_callback,
2873 const void* user_data) {
2874 JavaThread* jt = JavaThread::current();
2875 EscapeBarrier eb(true, jt);
2876 eb.deoptimize_objects_all_threads();
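// Note: deoptimize_objects_all_threads() reverts escape-analysis based
// optimizations, materializing scalar-replaced objects so the heap walk
// can observe them.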
2877 Arena dead_object_arena(mtServiceability);
2878 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
2879 {
2880 MutexLocker ml(Heap_lock);
2881 BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
2882 VM_HeapWalkOperation op(this, Handle(), context, user_data, &dead_objects);
2883 VMThread::execute(&op);
2884 }
2885 // Post events outside of Heap_lock
2886 post_dead_objects(&dead_objects);
2887 }
2888
2889 // iterate over all objects that are reachable from a given object
2890 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
2891 jvmtiObjectReferenceCallback object_ref_callback,
2892 const void* user_data) {
2893 oop obj = JNIHandles::resolve(object);
2894 Handle initial_object(Thread::current(), obj);
2895
2896 Arena dead_object_arena(mtServiceability);
2897 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
2898 {
2899 MutexLocker ml(Heap_lock);
2900 BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
2901 VM_HeapWalkOperation op(this, initial_object, context, user_data, &dead_objects);
2902 VMThread::execute(&op);
2903 }
2904 // Post events outside of Heap_lock
2905 post_dead_objects(&dead_objects);
2906 }
2907
2908 // follow references from an initial object or the GC roots
2909 void JvmtiTagMap::follow_references(jint heap_filter,
2910 Klass* klass,
2911 jobject object,
2912 const jvmtiHeapCallbacks* callbacks,
2913 const void* user_data)
2914 {
2915 oop obj = JNIHandles::resolve(object);
2916 JavaThread* jt = JavaThread::current();
2917 Handle initial_object(jt, obj);
2918 // EA based optimizations are already reverted for objects that are tagged or reachable from initial_object.
2919 EscapeBarrier eb(initial_object.is_null() &&
2920 !(heap_filter & JVMTI_HEAP_FILTER_UNTAGGED),
2921 jt);
2922 eb.deoptimize_objects_all_threads();
2923
2924 Arena dead_object_arena(mtServiceability);
2925 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
2926 {
2927 MutexLocker ml(Heap_lock);
2928 AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
2929 VM_HeapWalkOperation op(this, initial_object, context, user_data, &dead_objects);
2930 VMThread::execute(&op);
2931 }
2932 // Post events outside of Heap_lock
2933 post_dead_objects(&dead_objects);
2934 }
2935
2936 // Concurrent GC needs to call this in a relocation pause so that, after objects
2937 // are moved and have their new addresses, the table can be rehashed.
2938 void JvmtiTagMap::set_needs_rehashing() {
2939 assert(SafepointSynchronize::is_at_safepoint(), "called in gc pause");
2940 assert(Thread::current()->is_VM_thread(), "should be the VM thread");
2941
2942 JvmtiEnvIterator it;
2943 for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
2944 JvmtiTagMap* tag_map = env->tag_map_acquire();
2945 if (tag_map != NULL) {
2946 tag_map->_needs_rehashing = true;
2947 }
2948 }
2949 }
2950
2951 // Verify gc_notification follows set_needs_cleaning.
2952 DEBUG_ONLY(static bool notified_needs_cleaning = false;)
2953
2954 void JvmtiTagMap::set_needs_cleaning() {
2955 assert(SafepointSynchronize::is_at_safepoint(), "called in gc pause");
2956 assert(Thread::current()->is_VM_thread(), "should be the VM thread");
2957 // Can't assert !notified_needs_cleaning; a partial GC might be upgraded
2958 // to a full GC and do this twice without intervening gc_notification.
2959 DEBUG_ONLY(notified_needs_cleaning = true;)
2960
2961 JvmtiEnvIterator it;
2962 for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
2963 JvmtiTagMap* tag_map = env->tag_map_acquire();
2964 if (tag_map != NULL) {
2965 tag_map->_needs_cleaning = !tag_map->is_empty();
2966 }
2967 }
2968 }
2969
2970 void JvmtiTagMap::gc_notification(size_t num_dead_entries) {
2971 assert(notified_needs_cleaning, "missing GC notification");
2972 DEBUG_ONLY(notified_needs_cleaning = false;)
2973
2974 // Notify ServiceThread if there's work to do.
2975 {
2976 MonitorLocker ml(Service_lock, Mutex::_no_safepoint_check_flag);
2977 _has_object_free_events = (num_dead_entries != 0);
2978 if (_has_object_free_events) ml.notify_all();
2979 }
2980
2981 // If no dead entries then cancel cleaning requests.
2982 if (num_dead_entries == 0) {
2983 JvmtiEnvIterator it;
2984 for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
2985 JvmtiTagMap* tag_map = env->tag_map_acquire();
2986 if (tag_map != NULL) {
2987 MutexLocker ml (tag_map->lock(), Mutex::_no_safepoint_check_flag);
2988 tag_map->_needs_cleaning = false;
2989 }
2990 }
2991 }
2992 }
2993
2994 // Used by ServiceThread to discover there is work to do.
2995 bool JvmtiTagMap::has_object_free_events_and_reset() {
2996 assert_lock_strong(Service_lock);
2997 bool result = _has_object_free_events;
2998 _has_object_free_events = false;
2999 return result;
3000 }
3001
3002 // Used by ServiceThread to clean up tagmaps.
3003 void JvmtiTagMap::flush_all_object_free_events() {
3004 JavaThread* thread = JavaThread::current();
3005 JvmtiEnvIterator it;
3006 for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
3007 JvmtiTagMap* tag_map = env->tag_map_acquire();
3008 if (tag_map != NULL) {
3009 tag_map->flush_object_free_events();
3010 ThreadBlockInVM tbiv(thread); // Be safepoint-polite while looping.
3011 }
3012 }
3013 }