/*
 * Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "jvmtifiles/jvmtiEnv.hpp"
#include "logging/log.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/access.inline.hpp"
#include "oops/arrayOop.hpp"
#include "oops/constantPool.inline.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/klass.inline.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/typeArrayOop.inline.hpp"
#include "prims/jvmtiEventController.hpp"
#include "prims/jvmtiEventController.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiImpl.hpp"
#include "prims/jvmtiTagMap.hpp"
#include "prims/jvmtiTagMapTable.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/deoptimization.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/mutex.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/reflectionUtils.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/timerTrace.hpp"
#include "runtime/thread.inline.hpp"
#include "runtime/threadSMR.hpp"
#include "runtime/vframe.hpp"
#include "runtime/vmThread.hpp"
#include "runtime/vmOperations.hpp"
#include "utilities/macros.hpp"

bool JvmtiTagMap::_has_object_free_events = false;

// create a JvmtiTagMap
JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
  _env(env),
  _lock(Mutex::nonleaf+1, "JvmtiTagMap_lock", Mutex::_allow_vm_block_flag,
        Mutex::_safepoint_check_never),
  _needs_rehashing(false),
  _needs_cleaning(false),
  _posting_events(false) {

  assert(JvmtiThreadState_lock->is_locked(), "sanity check");
  assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");

  _hashmap = new JvmtiTagMapTable();

  // finally add us to the environment
  ((JvmtiEnvBase *)env)->release_set_tag_map(this);
}
// destroy a JvmtiTagMap
JvmtiTagMap::~JvmtiTagMap() {

  // no lock acquired as we assume the enclosing environment is
  // also being destroyed.
  ((JvmtiEnvBase *)_env)->set_tag_map(NULL);

  // finally destroy the hashmap
  delete _hashmap;
  _hashmap = NULL;
}

// Called by env_dispose() to reclaim memory before deallocation.
// Remove all the entries but keep the empty table intact.
// This needs the table lock.
void JvmtiTagMap::clear() {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
  _hashmap->clear();
}

// returns the tag map for the given environment. If the tag map
// doesn't exist then it is created.
JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
  JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map_acquire();
  if (tag_map == NULL) {
    MutexLocker mu(JvmtiThreadState_lock);
    tag_map = ((JvmtiEnvBase*)env)->tag_map();
    if (tag_map == NULL) {
      tag_map = new JvmtiTagMap(env);
    }
  } else {
    DEBUG_ONLY(JavaThread::current()->check_possible_safepoint());
  }
  return tag_map;
}

// iterate over all entries in the tag map.
void JvmtiTagMap::entry_iterate(JvmtiTagMapEntryClosure* closure) {
  hashmap()->entry_iterate(closure);
}

// returns true if the hashmaps are empty
bool JvmtiTagMap::is_empty() {
  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
  return hashmap()->is_empty();
}

// This checks for posting and rehashing before operations on
// this tagmap table.
void JvmtiTagMap::check_hashmap(GrowableArray<jlong>* objects) {
  assert(is_locked(), "checking");

  if (is_empty()) { return; }

  if (_needs_cleaning &&
      objects != NULL &&
      env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
    remove_dead_entries_locked(objects);
  }
  if (_needs_rehashing) {
    log_info(jvmti, table)("TagMap table needs rehashing");
    hashmap()->rehash();
    _needs_rehashing = false;
  }
}

// This checks for posting and rehashing and is called from the heap walks.
void JvmtiTagMap::check_hashmaps_for_heapwalk(GrowableArray<jlong>* objects) {
  assert(SafepointSynchronize::is_at_safepoint(), "called from safepoints");

  // Verify that the tag map tables are valid and unconditionally post events
  // that are expected to be posted before gc_notification.
  JvmtiEnvIterator it;
  for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
    JvmtiTagMap* tag_map = env->tag_map_acquire();
    if (tag_map != NULL) {
      // The ZDriver may be walking the hashmaps concurrently so this lock is needed.
      MutexLocker ml(tag_map->lock(), Mutex::_no_safepoint_check_flag);
      tag_map->check_hashmap(objects);
    }
  }
}

// Return the tag value for an object, or 0 if the object is
// not tagged
//
static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
  JvmtiTagMapEntry* entry = tag_map->hashmap()->find(o);
  if (entry == NULL) {
    return 0;
  } else {
    jlong tag = entry->tag();
    assert(tag != 0, "should not be zero");
    return tag;
  }
}
// A CallbackWrapper is a support class for querying and tagging an object
// around a callback to a profiler. The constructor does pre-callback
// work to get the tag value, klass tag value, ... and the destructor
// does the post-callback work of tagging or untagging the object.
//
// {
//   CallbackWrapper wrapper(tag_map, o);
//
//   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//     checking to see if the object has been tagged, untagged, or the
//     tag value has changed.
//
class CallbackWrapper : public StackObj {
 private:
  JvmtiTagMap* _tag_map;
  JvmtiTagMapTable* _hashmap;
  JvmtiTagMapEntry* _entry;
  oop _o;
  jlong _obj_size;
  jlong _obj_tag;
  jlong _klass_tag;

 protected:
  JvmtiTagMap* tag_map() const { return _tag_map; }

  // invoked post-callback to tag, untag, or update the tag of an object
  void inline post_callback_tag_update(oop o, JvmtiTagMapTable* hashmap,
                                       JvmtiTagMapEntry* entry, jlong obj_tag);
 public:
  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
           "MT unsafe or must be VM thread");

    // object to tag
    _o = o;

    // object size
    _obj_size = (jlong)_o->size() * wordSize;

    // record the context
    _tag_map = tag_map;
    _hashmap = tag_map->hashmap();
    _entry = _hashmap->find(_o);

    // get object tag
    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();

    // get the class and the class's tag value
    assert(vmClasses::Class_klass()->is_mirror_instance_klass(), "Is not?");

    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
  }

  ~CallbackWrapper() {
    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
  }

  inline jlong* obj_tag_p()              { return &_obj_tag; }
  inline jlong obj_size() const          { return _obj_size; }
  inline jlong obj_tag() const           { return _obj_tag; }
  inline jlong klass_tag() const         { return _klass_tag; }
};


// invoked post-callback to tag, untag, or update the tag of an object
void inline CallbackWrapper::post_callback_tag_update(oop o,
                                                      JvmtiTagMapTable* hashmap,
                                                      JvmtiTagMapEntry* entry,
                                                      jlong obj_tag) {
  if (entry == NULL) {
    if (obj_tag != 0) {
      // callback has tagged the object
      assert(Thread::current()->is_VM_thread(), "must be VMThread");
      hashmap->add(o, obj_tag);
    }
  } else {
    // object was previously tagged - the callback may have untagged
    // the object or changed the tag value
    if (obj_tag == 0) {
      hashmap->remove(o);
    } else {
      if (obj_tag != entry->tag()) {
        entry->set_tag(obj_tag);
      }
    }
  }
}
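// For reference, the post-callback update reduces to this decision table
// (informal summary, mirroring the code above):
//
//   entry == NULL, obj_tag != 0          -> add(o, obj_tag)     (newly tagged)
//   entry != NULL, obj_tag == 0          -> remove(o)           (untagged)
//   entry != NULL, obj_tag != entry tag  -> set_tag(obj_tag)    (tag updated)
//   otherwise                            -> nothing to do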
// An extended CallbackWrapper used when reporting an object reference
// to the agent.
//
// {
//   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
//
//   (*callback)(wrapper.klass_tag(),
//               wrapper.obj_size(),
//               wrapper.obj_tag_p(),
//               wrapper.referrer_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//     checking to see if the referrer object has been tagged, untagged,
//     or the tag value has changed.
//
class TwoOopCallbackWrapper : public CallbackWrapper {
 private:
  bool _is_reference_to_self;
  JvmtiTagMapTable* _referrer_hashmap;
  JvmtiTagMapEntry* _referrer_entry;
  oop _referrer;
  jlong _referrer_obj_tag;
  jlong _referrer_klass_tag;
  jlong* _referrer_tag_p;

  bool is_reference_to_self() const { return _is_reference_to_self; }

 public:
  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
    CallbackWrapper(tag_map, o)
  {
    // self reference needs to be handled in a special way
    _is_reference_to_self = (referrer == o);

    if (_is_reference_to_self) {
      _referrer_klass_tag = klass_tag();
      _referrer_tag_p = obj_tag_p();
    } else {
      _referrer = referrer;
      // record the context
      _referrer_hashmap = tag_map->hashmap();
      _referrer_entry = _referrer_hashmap->find(_referrer);

      // get object tag
      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
      _referrer_tag_p = &_referrer_obj_tag;

      // get referrer class tag.
      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
    }
  }

  ~TwoOopCallbackWrapper() {
    if (!is_reference_to_self()) {
      post_callback_tag_update(_referrer,
                               _referrer_hashmap,
                               _referrer_entry,
                               _referrer_obj_tag);
    }
  }

  // address of referrer tag
  // (for a self reference this will return the same thing as obj_tag_p())
  inline jlong* referrer_tag_p() { return _referrer_tag_p; }

  // referrer's class tag
  inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
};

// tag an object
//
// This function is performance critical. If many threads attempt to tag objects
// around the same time then it's possible that the Mutex associated with the
// tag map will be a hot lock.
void JvmtiTagMap::set_tag(jobject object, jlong tag) {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);

  // SetTag should not post events because the JavaThread has to
  // transition to native for the callback and this cannot stop for
  // safepoints with the hashmap lock held.
  check_hashmap(NULL); /* don't collect dead objects */

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  // see if the object is already tagged
  JvmtiTagMapTable* hashmap = _hashmap;
  JvmtiTagMapEntry* entry = hashmap->find(o);

  // if the object is not already tagged then we tag it
  if (entry == NULL) {
    if (tag != 0) {
      hashmap->add(o, tag);
    } else {
      // tagging an untagged object with a zero tag is a no-op
    }
  } else {
    // if the object is already tagged then we either update
    // the tag (if a new tag value has been provided)
    // or remove the object if the new tag value is 0.
    if (tag == 0) {
      hashmap->remove(o);
    } else {
      entry->set_tag(tag);
    }
  }
}

// get the tag for an object
jlong JvmtiTagMap::get_tag(jobject object) {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);

  // GetTag should not post events because the JavaThread has to
  // transition to native for the callback and this cannot stop for
  // safepoints with the hashmap lock held.
  check_hashmap(NULL); /* don't collect dead objects */

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  return tag_for(this, o);
}
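// Agent-side view (illustrative only; the jvmti/obj variable names are
// assumptions): these two functions back the JVMTI SetTag/GetTag calls.
//
//   jvmtiEnv* jvmti = ...;
//   jvmti->SetTag(obj, 42);    // ends up in JvmtiTagMap::set_tag()
//   jlong tag;
//   jvmti->GetTag(obj, &tag);  // ends up in JvmtiTagMap::get_tag(), 0 if untagged
//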
// Helper class used to describe the static or instance fields of a class.
// For each field it holds the field index (as defined by the JVMTI specification),
// the field type, and the offset.

class ClassFieldDescriptor: public CHeapObj<mtInternal> {
 private:
  int _field_index;
  int _field_offset;
  char _field_type;
 public:
  ClassFieldDescriptor(int index, char type, int offset) :
    _field_index(index), _field_offset(offset), _field_type(type) {
  }
  int field_index()  const { return _field_index; }
  char field_type()  const { return _field_type; }
  int field_offset() const { return _field_offset; }
};

class ClassFieldMap: public CHeapObj<mtInternal> {
 private:
  enum {
    initial_field_count = 5
  };

  // list of field descriptors
  GrowableArray<ClassFieldDescriptor*>* _fields;

  // constructor
  ClassFieldMap();

  // add a field
  void add(int index, char type, int offset);

 public:
  ~ClassFieldMap();

  // access
  int field_count()                     { return _fields->length(); }
  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }

  // functions to create maps of static or instance fields
  static ClassFieldMap* create_map_of_static_fields(Klass* k);
  static ClassFieldMap* create_map_of_instance_fields(oop obj);
};

ClassFieldMap::ClassFieldMap() {
  _fields = new (ResourceObj::C_HEAP, mtServiceability)
    GrowableArray<ClassFieldDescriptor*>(initial_field_count, mtServiceability);
}

ClassFieldMap::~ClassFieldMap() {
  for (int i=0; i<_fields->length(); i++) {
    delete _fields->at(i);
  }
  delete _fields;
}

void ClassFieldMap::add(int index, char type, int offset) {
  ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
  _fields->append(field);
}

// Returns a heap allocated ClassFieldMap to describe the static fields
// of the given class.
//
ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
  InstanceKlass* ik = InstanceKlass::cast(k);

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  FilteredFieldStream f(ik, false, false);
  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, true, true); !fld.eos(); fld.next(), index++) {
    // ignore instance fields
    if (!fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }
  return field_map;
}

// Returns a heap allocated ClassFieldMap to describe the instance fields
// of the given class. All instance fields are included (this means public
// and private fields declared in superclasses and superinterfaces too).
//
ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
  InstanceKlass* ik = InstanceKlass::cast(obj->klass());

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  FilteredFieldStream f(ik, false, false);

  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, false, false); !fld.eos(); fld.next(), index++) {
    // ignore static fields
    if (fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }

  return field_map;
}
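// Note on the index arithmetic above (informal): the JVMTI field index runs
// in the opposite direction to FilteredFieldStream's iteration order, which
// is why stream position i is mirrored into the reported index as
// max_field_index - i in both create_map_* functions.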
// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
// heap iteration and avoid creating a field map for each object in the heap
// (only need to create the map when the first instance of a class is encountered).
//
class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
 private:
  enum {
    initial_class_count = 200
  };
  ClassFieldMap* _field_map;

  ClassFieldMap* field_map() const { return _field_map; }

  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
  ~JvmtiCachedClassFieldMap();

  static GrowableArray<InstanceKlass*>* _class_list;
  static void add_to_class_list(InstanceKlass* ik);

 public:
  // returns the field map for a given object (returning map cached
  // by InstanceKlass if possible)
  static ClassFieldMap* get_map_of_instance_fields(oop obj);

  // removes the field map from all instanceKlasses - should be
  // called before VM operation completes
  static void clear_cache();

  // returns the number of ClassFieldMaps cached by instanceKlasses
  static int cached_field_map_count();
};

GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;

JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
  _field_map = field_map;
}

JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
  if (_field_map != NULL) {
    delete _field_map;
  }
}

// Marker class to ensure that the class field map cache is only used in a defined
// scope.
class ClassFieldMapCacheMark : public StackObj {
 private:
  static bool _is_active;
 public:
  ClassFieldMapCacheMark() {
    assert(Thread::current()->is_VM_thread(), "must be VMThread");
    assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
    assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
    _is_active = true;
  }
  ~ClassFieldMapCacheMark() {
    JvmtiCachedClassFieldMap::clear_cache();
    _is_active = false;
  }
  static bool is_active() { return _is_active; }
};

bool ClassFieldMapCacheMark::_is_active;


// record that the given InstanceKlass is caching a field map
void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
  if (_class_list == NULL) {
    _class_list = new (ResourceObj::C_HEAP, mtServiceability)
      GrowableArray<InstanceKlass*>(initial_class_count, mtServiceability);
  }
  _class_list->push(ik);
}

// returns the instance field map for the given object
// (returns field map cached by the InstanceKlass if possible)
ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");

  Klass* k = obj->klass();
  InstanceKlass* ik = InstanceKlass::cast(k);

  // return cached map if possible
  JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
  if (cached_map != NULL) {
    assert(cached_map->field_map() != NULL, "missing field list");
    return cached_map->field_map();
  } else {
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
    cached_map = new JvmtiCachedClassFieldMap(field_map);
    ik->set_jvmti_cached_class_field_map(cached_map);
    add_to_class_list(ik);
    return field_map;
  }
}
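// Typical use inside a heap walk VM operation (informal sketch, mirroring
// VM_HeapIterateOperation::doit() below):
//
//   {
//     ClassFieldMapCacheMark cm;  // asserts: VM thread, cache empty, not nested
//     ...
//     ClassFieldMap* map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
//     ...
//   } // destructor runs clear_cache() before the VM operation completes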
// remove the field maps cached from all instanceKlasses
void JvmtiCachedClassFieldMap::clear_cache() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  if (_class_list != NULL) {
    for (int i = 0; i < _class_list->length(); i++) {
      InstanceKlass* ik = _class_list->at(i);
      JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
      assert(cached_map != NULL, "should not be NULL");
      ik->set_jvmti_cached_class_field_map(NULL);
      delete cached_map;  // deletes the encapsulated field map
    }
    delete _class_list;
    _class_list = NULL;
  }
}

// returns the number of ClassFieldMaps cached by instanceKlasses
int JvmtiCachedClassFieldMap::cached_field_map_count() {
  return (_class_list == NULL) ? 0 : _class_list->length();
}

// helper function to indicate if an object is filtered by its tag or class tag
static inline bool is_filtered_by_heap_filter(jlong obj_tag,
                                              jlong klass_tag,
                                              int heap_filter) {
  // apply the heap filter
  if (obj_tag != 0) {
    // filter out tagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
  } else {
    // filter out untagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
  }
  if (klass_tag != 0) {
    // filter out objects with tagged classes
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
  } else {
    // filter out objects with untagged classes.
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
  }
  return false;
}

// helper function to indicate if an object is filtered by a klass filter
static inline bool is_filtered_by_klass_filter(oop obj, Klass* klass_filter) {
  if (klass_filter != NULL) {
    if (obj->klass() != klass_filter) {
      return true;
    }
  }
  return false;
}

// helper function to tell if a field is a primitive field or not
static inline bool is_primitive_field_type(char type) {
  return (type != JVM_SIGNATURE_CLASS && type != JVM_SIGNATURE_ARRAY);
}

// helper function to copy the value from location addr to jvalue.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
    default: ShouldNotReachHere();
  }
}
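// Filter semantics by example (informal): a heap_filter of
// JVMTI_HEAP_FILTER_UNTAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED reports only
// objects that are themselves tagged AND whose class is tagged; each set bit
// removes the corresponding group of objects from the walk.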
// helper function to invoke string primitive value callback
// returns visit control flags
static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
                                         CallbackWrapper* wrapper,
                                         oop str,
                                         void* user_data)
{
  assert(str->klass() == vmClasses::String_klass(), "not a string");

  typeArrayOop s_value = java_lang_String::value(str);

  // JDK-6584008: the value field may be null if a String instance is
  // partially constructed.
  if (s_value == NULL) {
    return 0;
  }
  // get the string value and length
  // (string value may be offset from the base)
  int s_len = java_lang_String::length(str);
  bool is_latin1 = java_lang_String::is_latin1(str);
  jchar* value;
  if (s_len > 0) {
    if (!is_latin1) {
      value = s_value->char_at_addr(0);
    } else {
      // Inflate latin1 encoded string to UTF16
      jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
      for (int i = 0; i < s_len; i++) {
        buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
      }
      value = &buf[0];
    }
  } else {
    // Don't use char_at_addr(0) if length is 0
    value = (jchar*) s_value->base(T_CHAR);
  }

  // invoke the callback
  jint res = (*cb)(wrapper->klass_tag(),
                   wrapper->obj_size(),
                   wrapper->obj_tag_p(),
                   value,
                   (jint)s_len,
                   user_data);

  if (is_latin1 && s_len > 0) {
    FREE_C_HEAP_ARRAY(jchar, value);
  }
  return res;
}

// helper function to invoke array primitive value callback
// returns visit control flags
static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
                                                  CallbackWrapper* wrapper,
                                                  oop obj,
                                                  void* user_data)
{
  assert(obj->is_typeArray(), "not a primitive array");

  // get base address of first element
  typeArrayOop array = typeArrayOop(obj);
  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
  void* elements = array->base(type);

  // jvmtiPrimitiveType is defined so this mapping is always correct
  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);

  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               (jint)array->length(),
               elem_type,
               elements,
               user_data);
}
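// The cast above works because the jvmtiPrimitiveType values are defined as
// the JVM type signature characters (informal reminder), e.g.
// JVMTI_PRIMITIVE_TYPE_INT == 'I' and JVMTI_PRIMITIVE_TYPE_LONG == 'J',
// which is exactly what type2char() returns for a BasicType.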
// helper function to invoke the primitive field callback for all static fields
// of a given class
static jint invoke_primitive_field_callback_for_static_fields
  (CallbackWrapper* wrapper,
   oop obj,
   jvmtiPrimitiveFieldCallback cb,
   void* user_data)
{
  // for static fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(obj->klass() == vmClasses::Class_klass(), "not a class");
  if (java_lang_Class::is_primitive(obj)) {
    return 0;
  }
  Klass* klass = java_lang_Class::as_Klass(obj);

  // ignore classes for object and type arrays
  if (!klass->is_instance_klass()) {
    return 0;
  }

  // ignore classes which aren't linked yet
  InstanceKlass* ik = InstanceKlass::cast(klass);
  if (!ik->is_linked()) {
    return 0;
  }

  // get the field map
  ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);

  // invoke the callback for each static primitive field
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value
    int offset = field->field_offset();
    address addr = cast_from_oop<address>(klass->java_mirror()) + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      delete field_map;
      return res;
    }
  }

  delete field_map;
  return 0;
}

// helper function to invoke the primitive field callback for all instance fields
// of a given object
static jint invoke_primitive_field_callback_for_instance_fields(
  CallbackWrapper* wrapper,
  oop obj,
  jvmtiPrimitiveFieldCallback cb,
  void* user_data)
{
  // for instance fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  // get the map of the instance fields
  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);

  // invoke the callback for each instance primitive field
  for (int i=0; i<fields->field_count(); i++) {
    ClassFieldDescriptor* field = fields->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value
    int offset = field->field_offset();
    address addr = cast_from_oop<address>(obj) + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      return res;
    }
  }
  return 0;
}
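// Return-flag handling in both helpers above (informal note): the callback
// result is a bit set, and only JVMTI_VISIT_ABORT matters for primitive field
// reports - it ends the field iteration and is propagated so the enclosing
// heap walk can terminate as well.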
// VM operation to iterate over all objects in the heap (both reachable
// and unreachable)
class VM_HeapIterateOperation: public VM_Operation {
 private:
  ObjectClosure* _blk;
  GrowableArray<jlong>* const _dead_objects;
 public:
  VM_HeapIterateOperation(ObjectClosure* blk, GrowableArray<jlong>* objects) :
    _blk(blk), _dead_objects(objects) { }

  VMOp_Type type() const { return VMOp_HeapIterateOperation; }
  void doit() {
    // allows class field maps to be cached during iteration
    ClassFieldMapCacheMark cm;

    JvmtiTagMap::check_hashmaps_for_heapwalk(_dead_objects);

    // make sure that heap is parsable (fills TLABs with filler objects)
    Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

    // Verify heap before iteration - if the heap gets corrupted then
    // JVMTI's IterateOverHeap will crash.
    if (VerifyBeforeIteration) {
      Universe::verify();
    }

    // do the iteration
    Universe::heap()->object_iterate(_blk);
  }

};
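// Both iterate_over_heap() and iterate_through_heap() below drive this
// operation the same way (informal sketch):
//
//   VM_HeapIterateOperation op(&closure, &dead_objects);
//   VMThread::execute(&op);  // doit() runs in the VM thread at a safepoint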
// An ObjectClosure used to support the deprecated IterateOverHeap and
// IterateOverInstancesOfClass functions
class IterateOverHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  Klass* _klass;
  jvmtiHeapObjectFilter _object_filter;
  jvmtiHeapObjectCallback _heap_object_callback;
  const void* _user_data;

  // accessors
  JvmtiTagMap* tag_map() const                    { return _tag_map; }
  jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  Klass* klass() const                            { return _klass; }
  const void* user_data() const                   { return _user_data; }

  // indicates if iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const               { return _iteration_aborted; }
  void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }

 public:
  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
                               Klass* klass,
                               jvmtiHeapObjectFilter object_filter,
                               jvmtiHeapObjectCallback heap_object_callback,
                               const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _object_filter(object_filter),
    _heap_object_callback(heap_object_callback),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};

// invoked for each object in the heap
void IterateOverHeapObjectClosure::do_object(oop o) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // instanceof check when filtering by klass
  if (klass() != NULL && !o->is_a(klass())) {
    return;
  }

  // skip if object is a dormant shared object whose mirror hasn't been loaded
  if (o != NULL && o->klass()->java_mirror() == NULL) {
    log_debug(cds, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(o),
                         o->klass()->external_name());
    return;
  }

  // prepare for the callback
  CallbackWrapper wrapper(tag_map(), o);

  // if the object is tagged and we're only interested in untagged objects
  // then don't invoke the callback. Similarly, if the object is untagged
  // and we're only interested in tagged objects we skip the callback.
  if (wrapper.obj_tag() != 0) {
    if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
  } else {
    if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
  }

  // invoke the agent's callback
  jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
                                                       wrapper.obj_size(),
                                                       wrapper.obj_tag_p(),
                                                       (void*)user_data());
  if (control == JVMTI_ITERATION_ABORT) {
    set_iteration_aborted(true);
  }
}
// An ObjectClosure used to support the IterateThroughHeap function
class IterateThroughHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  Klass* _klass;
  int _heap_filter;
  const jvmtiHeapCallbacks* _callbacks;
  const void* _user_data;

  // accessor functions
  JvmtiTagMap* tag_map() const                { return _tag_map; }
  int heap_filter() const                     { return _heap_filter; }
  const jvmtiHeapCallbacks* callbacks() const { return _callbacks; }
  Klass* klass() const                        { return _klass; }
  const void* user_data() const               { return _user_data; }

  // indicates if the iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const           { return _iteration_aborted; }

  // used to check the visit control flags. If the abort flag is set
  // then we set the iteration aborted flag so that the iteration completes
  // without processing any further objects
  bool check_flags_for_abort(jint flags) {
    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
    if (is_abort) {
      _iteration_aborted = true;
    }
    return is_abort;
  }

 public:
  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
                                  Klass* klass,
                                  int heap_filter,
                                  const jvmtiHeapCallbacks* heap_callbacks,
                                  const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _heap_filter(heap_filter),
    _callbacks(heap_callbacks),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};

// invoked for each object in the heap
void IterateThroughHeapObjectClosure::do_object(oop obj) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // apply class filter
  if (is_filtered_by_klass_filter(obj, klass())) return;

  // skip if object is a dormant shared object whose mirror hasn't been loaded
  if (obj != NULL && obj->klass()->java_mirror() == NULL) {
    log_debug(cds, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(obj),
                         obj->klass()->external_name());
    return;
  }

  // prepare for callback
  CallbackWrapper wrapper(tag_map(), obj);

  // check if filtered by the heap filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
    return;
  }

  // for arrays we need the length, otherwise -1
  bool is_array = obj->is_array();
  int len = is_array ? arrayOop(obj)->length() : -1;

  // invoke the object callback (if callback is provided)
  if (callbacks()->heap_iteration_callback != NULL) {
    jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
    jint res = (*cb)(wrapper.klass_tag(),
                     wrapper.obj_size(),
                     wrapper.obj_tag_p(),
                     (jint)len,
                     (void*)user_data());
    if (check_flags_for_abort(res)) return;
  }

  // for objects and classes we report primitive fields if callback provided
  if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
    jint res;
    jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
    if (obj->klass() == vmClasses::Class_klass()) {
      res = invoke_primitive_field_callback_for_static_fields(&wrapper,
                                                              obj,
                                                              cb,
                                                              (void*)user_data());
    } else {
      res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
                                                                obj,
                                                                cb,
                                                                (void*)user_data());
    }
    if (check_flags_for_abort(res)) return;
  }

  // string callback
  if (!is_array &&
      callbacks()->string_primitive_value_callback != NULL &&
      obj->klass() == vmClasses::String_klass()) {
    jint res = invoke_string_value_callback(
                 callbacks()->string_primitive_value_callback,
                 &wrapper,
                 obj,
                 (void*)user_data());
    if (check_flags_for_abort(res)) return;
  }

  // array callback
  if (is_array &&
      callbacks()->array_primitive_value_callback != NULL &&
      obj->is_typeArray()) {
    jint res = invoke_array_primitive_value_callback(
                 callbacks()->array_primitive_value_callback,
                 &wrapper,
                 obj,
                 (void*)user_data());
    if (check_flags_for_abort(res)) return;
  }
}


// Deprecated function to iterate over all objects in the heap
void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
                                    Klass* klass,
                                    jvmtiHeapObjectCallback heap_object_callback,
                                    const void* user_data)
{
  // EA based optimizations on tagged objects are already reverted.
  EscapeBarrier eb(object_filter == JVMTI_HEAP_OBJECT_UNTAGGED ||
                   object_filter == JVMTI_HEAP_OBJECT_EITHER,
                   JavaThread::current());
  eb.deoptimize_objects_all_threads();
  Arena dead_object_arena(mtServiceability);
  GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
  {
    MutexLocker ml(Heap_lock);
    IterateOverHeapObjectClosure blk(this,
                                     klass,
                                     object_filter,
                                     heap_object_callback,
                                     user_data);
    VM_HeapIterateOperation op(&blk, &dead_objects);
    VMThread::execute(&op);
  }
  // Post events outside of Heap_lock
  post_dead_objects(&dead_objects);
}
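// Agent-side view (illustrative only; my_old_cb and my_heap_cb are assumed
// callback names). IterateOverHeap above and IterateThroughHeap (implemented
// below by iterate_through_heap) are driven from JVMTI roughly as:
//
//   jvmti->IterateOverHeap(JVMTI_HEAP_OBJECT_EITHER, &my_old_cb, NULL);
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = &my_heap_cb;
//   jvmti->IterateThroughHeap(0 /* heap_filter */, NULL /* klass */,
//                             &callbacks, NULL /* user_data */);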
// Iterates over all objects in the heap
void JvmtiTagMap::iterate_through_heap(jint heap_filter,
                                       Klass* klass,
                                       const jvmtiHeapCallbacks* callbacks,
                                       const void* user_data)
{
  // EA based optimizations on tagged objects are already reverted.
  EscapeBarrier eb(!(heap_filter & JVMTI_HEAP_FILTER_UNTAGGED), JavaThread::current());
  eb.deoptimize_objects_all_threads();

  Arena dead_object_arena(mtServiceability);
  GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
  {
    MutexLocker ml(Heap_lock);
    IterateThroughHeapObjectClosure blk(this,
                                        klass,
                                        heap_filter,
                                        callbacks,
                                        user_data);
    VM_HeapIterateOperation op(&blk, &dead_objects);
    VMThread::execute(&op);
  }
  // Post events outside of Heap_lock
  post_dead_objects(&dead_objects);
}

void JvmtiTagMap::remove_dead_entries_locked(GrowableArray<jlong>* objects) {
  assert(is_locked(), "precondition");
  if (_needs_cleaning) {
    // Recheck whether to post object free events under the lock.
    if (!env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
      objects = NULL;
    }
    log_info(jvmti, table)("TagMap table needs cleaning%s",
                           ((objects != NULL) ? " and posting" : ""));
    hashmap()->remove_dead_entries(objects);
    _needs_cleaning = false;
  }
}

void JvmtiTagMap::remove_dead_entries(GrowableArray<jlong>* objects) {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
  remove_dead_entries_locked(objects);
}

void JvmtiTagMap::post_dead_objects(GrowableArray<jlong>* const objects) {
  assert(Thread::current()->is_Java_thread(), "Must post from JavaThread");
  if (objects != NULL && objects->length() > 0) {
    JvmtiExport::post_object_free(env(), objects);
    log_info(jvmti)("%d free objects posted", objects->length());
  }
}

void JvmtiTagMap::remove_and_post_dead_objects() {
  ResourceMark rm;
  GrowableArray<jlong> objects;
  remove_dead_entries(&objects);
  post_dead_objects(&objects);
}
void JvmtiTagMap::flush_object_free_events() {
  assert_not_at_safepoint();
  if (env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
    {
      MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
      // If another thread is posting events, let it finish
      while (_posting_events) {
        ml.wait();
      }

      if (!_needs_cleaning || is_empty()) {
        _needs_cleaning = false;
        return;
      }
      _posting_events = true;
    } // Drop the lock so we can do the cleaning on the VM thread.
    // Needs both cleaning and event posting (up to some other thread
    // getting there first after we dropped the lock).
    remove_and_post_dead_objects();
    {
      MonitorLocker ml(lock(), Mutex::_no_safepoint_check_flag);
      _posting_events = false;
      ml.notify_all();
    }
  } else {
    remove_dead_entries(NULL);
  }
}

// support class for get_objects_with_tags

class TagObjectCollector : public JvmtiTagMapEntryClosure {
 private:
  JvmtiEnv* _env;
  JavaThread* _thread;
  jlong* _tags;
  jint _tag_count;
  bool _some_dead_found;

  GrowableArray<jobject>* _object_results;   // collected objects (JNI local refs)
  GrowableArray<uint64_t>* _tag_results;     // collected tags

 public:
  TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) :
    _env(env),
    _thread(JavaThread::current()),
    _tags((jlong*)tags),
    _tag_count(tag_count),
    _some_dead_found(false),
    _object_results(new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<jobject>(1, mtServiceability)),
    _tag_results(new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<uint64_t>(1, mtServiceability)) { }

  ~TagObjectCollector() {
    delete _object_results;
    delete _tag_results;
  }

  bool some_dead_found() const { return _some_dead_found; }

  // for each tagged object check if the tag value matches
  // - if it matches then we create a JNI local reference to the object
  //   and record the reference and tag value.
  //
  void do_entry(JvmtiTagMapEntry* entry) {
    for (int i=0; i<_tag_count; i++) {
      if (_tags[i] == entry->tag()) {
        // The reference in this tag map could be the only (implicitly weak)
        // reference to that object. If we hand it out, we need to keep it live wrt
        // SATB marking similar to other j.l.ref.Reference referents. This is
        // achieved by using a phantom load in the object() accessor.
        oop o = entry->object();
        if (o == NULL) {
          _some_dead_found = true;
          // skip this whole entry
          return;
        }
        assert(o != NULL && Universe::heap()->is_in(o), "sanity check");
        jobject ref = JNIHandles::make_local(_thread, o);
        _object_results->append(ref);
        _tag_results->append((uint64_t)entry->tag());
      }
    }
  }
  // return the results from the collection
  //
  jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
    jvmtiError error;
    int count = _object_results->length();
    assert(count >= 0, "sanity check");

    // if object_result_ptr is not NULL then allocate the result and copy
    // in the object references.
    if (object_result_ptr != NULL) {
      error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
      if (error != JVMTI_ERROR_NONE) {
        return error;
      }
      for (int i=0; i<count; i++) {
        (*object_result_ptr)[i] = _object_results->at(i);
      }
    }

    // if tag_result_ptr is not NULL then allocate the result and copy
    // in the tag values.
    if (tag_result_ptr != NULL) {
      error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
      if (error != JVMTI_ERROR_NONE) {
        // free the object result array allocated above, not the caller's pointer
        if (object_result_ptr != NULL) {
          _env->Deallocate((unsigned char*)*object_result_ptr);
        }
        return error;
      }
      for (int i=0; i<count; i++) {
        (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
      }
    }

    *count_ptr = count;
    return JVMTI_ERROR_NONE;
  }
};

// return the list of objects with the specified tags
jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
  jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {

  TagObjectCollector collector(env(), tags, count);
  {
    // iterate over all tagged objects
    MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
    // Can't post ObjectFree events here from a JavaThread, so this
    // will race with the gc_notification thread in the tiny
    // window where the object is not marked but hasn't been notified that
    // it is collected yet.
    entry_iterate(&collector);
  }
  return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
}
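// Agent-side view (illustrative only):
//
//   jlong tags[1] = { 42 };
//   jint count;
//   jobject* objects;
//   jlong* result_tags;
//   jvmti->GetObjectsWithTags(1, tags, &count, &objects, &result_tags);
//   // ... use the results, then:
//   jvmti->Deallocate((unsigned char*)objects);
//   jvmti->Deallocate((unsigned char*)result_tags);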
// ObjectMarker is used to support marking objects when walking the
// heap.
//
// This implementation uses the existing mark bits in an object for
// marking. Objects that are marked must later have their headers restored.
// As most objects are unlocked and don't have their identity hash computed
// we don't have to save their headers. Instead we save the headers that
// are "interesting". Later when the headers are restored this implementation
// restores all headers to their initial value and then restores the few
// objects that had interesting headers.
//
// Future work: This implementation currently uses growable arrays to save
// the oop and header of interesting objects. As an optimization we could
// use the same technique as the GC and make use of the unused area
// between top() and end().
//

// An ObjectClosure used to restore the mark bits of an object
class RestoreMarksClosure : public ObjectClosure {
 public:
  void do_object(oop o) {
    if (o != NULL) {
      markWord mark = o->mark();
      if (mark.is_marked()) {
        o->init_mark();
      }
    }
  }
};

// ObjectMarker provides the mark and visited functions
class ObjectMarker : AllStatic {
 private:
  // saved headers
  static GrowableArray<oop>* _saved_oop_stack;
  static GrowableArray<markWord>* _saved_mark_stack;
  static bool _needs_reset;                  // do we need to reset mark bits?

 public:
  static void init();                        // initialize
  static void done();                        // clean-up

  static inline void mark(oop o);            // mark an object
  static inline bool visited(oop o);         // check if object has been visited

  static inline bool needs_reset()           { return _needs_reset; }
  static inline void set_needs_reset(bool v) { _needs_reset = v; }
};

GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
GrowableArray<markWord>* ObjectMarker::_saved_mark_stack = NULL;
bool ObjectMarker::_needs_reset = true;  // need to reset mark bits by default

// initialize ObjectMarker - prepares for object marking
void ObjectMarker::init() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  // prepare heap for iteration
  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

  // create stacks for interesting headers
  _saved_mark_stack = new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<markWord>(4000, mtServiceability);
  _saved_oop_stack = new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<oop>(4000, mtServiceability);

  if (UseBiasedLocking) {
    BiasedLocking::preserve_marks();
  }
}

// Object marking is done so restore object headers
void ObjectMarker::done() {
  // iterate over all objects and restore the mark bits to
  // their initial value
  RestoreMarksClosure blk;
  if (needs_reset()) {
    Universe::heap()->object_iterate(&blk);
  } else {
    // We don't need to reset mark bits on this call, but reset the
    // flag to the default for the next call.
    set_needs_reset(true);
  }

  // now restore the interesting headers
  for (int i = 0; i < _saved_oop_stack->length(); i++) {
    oop o = _saved_oop_stack->at(i);
    markWord mark = _saved_mark_stack->at(i);
    o->set_mark(mark);
  }

  if (UseBiasedLocking) {
    BiasedLocking::restore_marks();
  }

  // free the stacks
  delete _saved_oop_stack;
  delete _saved_mark_stack;
}

// mark an object
inline void ObjectMarker::mark(oop o) {
  assert(Universe::heap()->is_in(o), "sanity check");
  assert(!o->mark().is_marked(), "should only mark an object once");

  // object's mark word
  markWord mark = o->mark();

  if (o->mark_must_be_preserved(mark)) {
    _saved_mark_stack->push(mark);
    _saved_oop_stack->push(o);
  }

  // mark the object
  o->set_mark(markWord::prototype().set_marked());
}

// return true if object is marked
inline bool ObjectMarker::visited(oop o) {
  return o->mark().is_marked();
}
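// The ObjectMarkerController below ties init()/done() to a scope, so a heap
// walk uses this pair roughly as follows (informal sketch):
//
//   {
//     ObjectMarkerController marker;        // init(): prepare, save headers lazily
//     if (!ObjectMarker::visited(o)) {
//       ObjectMarker::mark(o);              // set the mark bit in the header
//       ...
//     }
//   }                                       // done(): restore all object headers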
// Stack allocated class to help ensure that ObjectMarker is used
// correctly. Constructor initializes ObjectMarker, destructor calls
// ObjectMarker's done() function to restore object headers.
class ObjectMarkerController : public StackObj {
 public:
  ObjectMarkerController() {
    ObjectMarker::init();
  }
  ~ObjectMarkerController() {
    ObjectMarker::done();
  }
};


// helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
// (not performance critical as only used for roots)
static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
  switch (kind) {
    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
    case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
    case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
    case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
    case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
    default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
  }
}

// Base class for all heap walk contexts. The base class maintains a flag
// to indicate if the context is valid or not.
class HeapWalkContext {
 private:
  bool _valid;
 public:
  HeapWalkContext(bool valid) { _valid = valid; }
  void invalidate()           { _valid = false; }
  bool is_valid() const       { return _valid; }
};

// A basic heap walk context for the deprecated heap walking functions.
// The context for a basic heap walk consists of the callbacks and the fields
// used by the referrer caching scheme.
class BasicHeapWalkContext: public HeapWalkContext {
 private:
  jvmtiHeapRootCallback _heap_root_callback;
  jvmtiStackReferenceCallback _stack_ref_callback;
  jvmtiObjectReferenceCallback _object_ref_callback;

  // used for caching
  oop _last_referrer;
  jlong _last_referrer_tag;

 public:
  BasicHeapWalkContext() : HeapWalkContext(false) { }

  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
                       jvmtiStackReferenceCallback stack_ref_callback,
                       jvmtiObjectReferenceCallback object_ref_callback) :
    HeapWalkContext(true),
    _heap_root_callback(heap_root_callback),
    _stack_ref_callback(stack_ref_callback),
    _object_ref_callback(object_ref_callback),
    _last_referrer(NULL),
    _last_referrer_tag(0) {
  }

  // accessors
  jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
  jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }

  oop last_referrer() const               { return _last_referrer; }
  void set_last_referrer(oop referrer)    { _last_referrer = referrer; }
  jlong last_referrer_tag() const         { return _last_referrer_tag; }
  void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
};
// The advanced heap walk context for the FollowReferences functions.
// The context is the callbacks, and the fields used for filtering.
class AdvancedHeapWalkContext: public HeapWalkContext {
 private:
  jint _heap_filter;
  Klass* _klass_filter;
  const jvmtiHeapCallbacks* _heap_callbacks;

 public:
  AdvancedHeapWalkContext() : HeapWalkContext(false) { }

  AdvancedHeapWalkContext(jint heap_filter,
                          Klass* klass_filter,
                          const jvmtiHeapCallbacks* heap_callbacks) :
    HeapWalkContext(true),
    _heap_filter(heap_filter),
    _klass_filter(klass_filter),
    _heap_callbacks(heap_callbacks) {
  }

  // accessors
  jint heap_filter() const    { return _heap_filter; }
  Klass* klass_filter() const { return _klass_filter; }

  const jvmtiHeapReferenceCallback heap_reference_callback() const {
    return _heap_callbacks->heap_reference_callback;
  }
  const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
    return _heap_callbacks->primitive_field_callback;
  }
  const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
    return _heap_callbacks->array_primitive_value_callback;
  }
  const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
    return _heap_callbacks->string_primitive_value_callback;
  }
};

// The CallbackInvoker is a class with static functions that the heap walk can call
// into to invoke callbacks. It works in one of two modes. The "basic" mode is
// used for the deprecated IterateOverReachableObjects functions. The "advanced"
// mode is for the newer FollowReferences function which supports a lot of
// additional callbacks.
class CallbackInvoker : AllStatic {
 private:
  // heap walk styles
  enum { basic, advanced };
  static int _heap_walk_type;
  static bool is_basic_heap_walk()    { return _heap_walk_type == basic; }
  static bool is_advanced_heap_walk() { return _heap_walk_type == advanced; }

  // context for basic style heap walk
  static BasicHeapWalkContext _basic_context;
  static BasicHeapWalkContext* basic_context() {
    assert(_basic_context.is_valid(), "invalid");
    return &_basic_context;
  }

  // context for advanced style heap walk
  static AdvancedHeapWalkContext _advanced_context;
  static AdvancedHeapWalkContext* advanced_context() {
    assert(_advanced_context.is_valid(), "invalid");
    return &_advanced_context;
  }

  // context needed for all heap walks
  static JvmtiTagMap* _tag_map;
  static const void* _user_data;
  static GrowableArray<oop>* _visit_stack;

  // accessors
  static JvmtiTagMap* tag_map()            { return _tag_map; }
  static const void* user_data()           { return _user_data; }
  static GrowableArray<oop>* visit_stack() { return _visit_stack; }

  // if the object hasn't been visited then push it onto the visit stack
  // so that it will be visited later
  static inline bool check_for_visit(oop obj) {
    if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
    return true;
  }

  // invoke basic style callbacks
  static inline bool invoke_basic_heap_root_callback
    (jvmtiHeapRootKind root_kind, oop obj);
  static inline bool invoke_basic_stack_ref_callback
    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
     int slot, oop obj);
  static inline bool invoke_basic_object_reference_callback
    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);
  // invoke advanced style callbacks
  static inline bool invoke_advanced_heap_root_callback
    (jvmtiHeapReferenceKind ref_kind, oop obj);
  static inline bool invoke_advanced_stack_ref_callback
    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
     jmethodID method, jlocation bci, jint slot, oop obj);
  static inline bool invoke_advanced_object_reference_callback
    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // used to report the value of primitive fields
  static inline bool report_primitive_field
    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);

 public:
  // initialize for basic mode
  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                             GrowableArray<oop>* visit_stack,
                                             const void* user_data,
                                             BasicHeapWalkContext context);

  // initialize for advanced mode
  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                GrowableArray<oop>* visit_stack,
                                                const void* user_data,
                                                AdvancedHeapWalkContext context);

  // functions to report roots
  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID m, oop o);
  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID method, jlocation bci, jint slot, oop o);

  // functions to report references
  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  static inline bool report_class_reference(oop referrer, oop referree);
  static inline bool report_class_loader_reference(oop referrer, oop referree);
  static inline bool report_signers_reference(oop referrer, oop referree);
  static inline bool report_protection_domain_reference(oop referrer, oop referree);
  static inline bool report_superclass_reference(oop referrer, oop referree);
  static inline bool report_interface_reference(oop referrer, oop referree);
  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  static inline bool report_primitive_array_values(oop array);
  static inline bool report_string_value(oop str);
  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
};

// statics
int CallbackInvoker::_heap_walk_type;
BasicHeapWalkContext CallbackInvoker::_basic_context;
AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;

// initialize for basic heap walk (IterateOverReachableObjects et al)
void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                                     GrowableArray<oop>* visit_stack,
                                                     const void* user_data,
                                                     BasicHeapWalkContext context) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _basic_context = context;
  _advanced_context.invalidate();  // will trigger assertion if used
  _heap_walk_type = basic;
}
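// Note (informal): exactly one of the two contexts is valid at a time - each
// initialize_* call installs its own context and invalidates the other, so a
// heap walk that uses the wrong mode trips the assert in basic_context() or
// advanced_context().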
heap walk (FollowReferences)
1718 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1719                                                         GrowableArray<oop>* visit_stack,
1720                                                         const void* user_data,
1721                                                         AdvancedHeapWalkContext context) {
1722   _tag_map = tag_map;
1723   _visit_stack = visit_stack;
1724   _user_data = user_data;
1725   _advanced_context = context;
1726   _basic_context.invalidate();  // will trigger assertion if used
1727   _heap_walk_type = advanced;
1728 }
1729
1730
1731 // invoke basic style heap root callback
1732 inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
1733   // check if heap roots should be reported
1734   jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
1735   if (cb == NULL) {
1736     return check_for_visit(obj);
1737   }
1738
1739   CallbackWrapper wrapper(tag_map(), obj);
1740   jvmtiIterationControl control = (*cb)(root_kind,
1741                                         wrapper.klass_tag(),
1742                                         wrapper.obj_size(),
1743                                         wrapper.obj_tag_p(),
1744                                         (void*)user_data());
1745   // push root to visit stack when following references
1746   if (control == JVMTI_ITERATION_CONTINUE &&
1747       basic_context()->object_ref_callback() != NULL) {
1748     visit_stack()->push(obj);
1749   }
1750   return control != JVMTI_ITERATION_ABORT;
1751 }
1752
1753 // invoke basic style stack ref callback
1754 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
1755                                                              jlong thread_tag,
1756                                                              jint depth,
1757                                                              jmethodID method,
1758                                                              int slot,
1759                                                              oop obj) {
1760   // check if stack refs should be reported
1761   jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
1762   if (cb == NULL) {
1763     return check_for_visit(obj);
1764   }
1765
1766   CallbackWrapper wrapper(tag_map(), obj);
1767   jvmtiIterationControl control = (*cb)(root_kind,
1768                                         wrapper.klass_tag(),
1769                                         wrapper.obj_size(),
1770                                         wrapper.obj_tag_p(),
1771                                         thread_tag,
1772                                         depth,
1773                                         method,
1774                                         slot,
1775                                         (void*)user_data());
1776   // push root to visit stack when following references
1777   if (control == JVMTI_ITERATION_CONTINUE &&
1778       basic_context()->object_ref_callback() != NULL) {
1779     visit_stack()->push(obj);
1780   }
1781   return control != JVMTI_ITERATION_ABORT;
1782 }
1783
1784 // invoke basic style object reference callback
1785 inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
1786                                                                     oop referrer,
1787                                                                     oop referree,
1788                                                                     jint index) {
1789
1790   BasicHeapWalkContext* context = basic_context();
1791
1792   // callback requires the referrer's tag. If it's the same referrer
1793   // as the last call then we use the cached value.
1794   jlong referrer_tag;
1795   if (referrer == context->last_referrer()) {
1796     referrer_tag = context->last_referrer_tag();
1797   } else {
1798     referrer_tag = tag_for(tag_map(), referrer);
1799   }
1800
1801   // do the callback
1802   CallbackWrapper wrapper(tag_map(), referree);
1803   jvmtiObjectReferenceCallback cb = context->object_ref_callback();
1804   jvmtiIterationControl control = (*cb)(ref_kind,
1805                                         wrapper.klass_tag(),
1806                                         wrapper.obj_size(),
1807                                         wrapper.obj_tag_p(),
1808                                         referrer_tag,
1809                                         index,
1810                                         (void*)user_data());
1811
1812   // record referrer and referrer tag. For self-references record the
1813   // tag value from the callback as this might differ from referrer_tag.
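  // (For a self-reference referrer == referree, and the callback may have
  // just stored a new tag through obj_tag_p, so the cached tag is refreshed
  // from the wrapper rather than from the now-stale referrer_tag.)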
1814   context->set_last_referrer(referrer);
1815   if (referrer == referree) {
1816     context->set_last_referrer_tag(*wrapper.obj_tag_p());
1817   } else {
1818     context->set_last_referrer_tag(referrer_tag);
1819   }
1820
1821   if (control == JVMTI_ITERATION_CONTINUE) {
1822     return check_for_visit(referree);
1823   } else {
1824     return control != JVMTI_ITERATION_ABORT;
1825   }
1826 }
1827
1828 // invoke advanced style heap root callback
1829 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
1830                                                                 oop obj) {
1831   AdvancedHeapWalkContext* context = advanced_context();
1832
1833   // check that callback is provided
1834   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
1835   if (cb == NULL) {
1836     return check_for_visit(obj);
1837   }
1838
1839   // apply class filter
1840   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1841     return check_for_visit(obj);
1842   }
1843
1844   // setup the callback wrapper
1845   CallbackWrapper wrapper(tag_map(), obj);
1846
1847   // apply tag filter
1848   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1849                                  wrapper.klass_tag(),
1850                                  context->heap_filter())) {
1851     return check_for_visit(obj);
1852   }
1853
1854   // for arrays we need the length, otherwise -1
1855   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
1856
1857   // invoke the callback
1858   jint res = (*cb)(ref_kind,
1859                    NULL, // referrer info
1860                    wrapper.klass_tag(),
1861                    0,    // referrer_class_tag is 0 for heap root
1862                    wrapper.obj_size(),
1863                    wrapper.obj_tag_p(),
1864                    NULL, // referrer_tag_p
1865                    len,
1866                    (void*)user_data());
1867   if (res & JVMTI_VISIT_ABORT) {
1868     return false;
1869   }
1870   if (res & JVMTI_VISIT_OBJECTS) {
1871     check_for_visit(obj);
1872   }
1873   return true;
1874 }
1875
1876 // report a reference from a thread stack to an object
1877 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
1878                                                                 jlong thread_tag,
1879                                                                 jlong tid,
1880                                                                 int depth,
1881                                                                 jmethodID method,
1882                                                                 jlocation bci,
1883                                                                 jint slot,
1884                                                                 oop obj) {
1885   AdvancedHeapWalkContext* context = advanced_context();
1886
1887   // check that callback is provided
1888   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
1889   if (cb == NULL) {
1890     return check_for_visit(obj);
1891   }
1892
1893   // apply class filter
1894   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1895     return check_for_visit(obj);
1896   }
1897
1898   // setup the callback wrapper
1899   CallbackWrapper wrapper(tag_map(), obj);
1900
1901   // apply tag filter
1902   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1903                                  wrapper.klass_tag(),
1904                                  context->heap_filter())) {
1905     return check_for_visit(obj);
1906   }
1907
1908   // setup the referrer info
1909   jvmtiHeapReferenceInfo reference_info;
1910   reference_info.stack_local.thread_tag = thread_tag;
1911   reference_info.stack_local.thread_id = tid;
1912   reference_info.stack_local.depth = depth;
1913   reference_info.stack_local.method = method;
1914   reference_info.stack_local.location = bci;
1915   reference_info.stack_local.slot = slot;
1916
1917   // for arrays we need the length, otherwise -1
1918   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
1919
1920   // call into the agent
1921   int res = (*cb)(ref_kind,
1922                   &reference_info,
1923                   wrapper.klass_tag(),
1924                   0,    // referrer_class_tag is 0 for heap root (stack)
1925                   wrapper.obj_size(),
1926                   wrapper.obj_tag_p(),
1927                   NULL, // referrer_tag_p is NULL for a root
1928                   len,
1929                   (void*)user_data());
1930
1931   if (res & JVMTI_VISIT_ABORT) {
1932     return false;
1933   }
1934   if (res & JVMTI_VISIT_OBJECTS) {
1935     check_for_visit(obj);
1936   }
1937   return true;
1938 }
1939
1940 // This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
1941 // only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
1942 #define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD)          \
1943                      | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)   \
1944                      | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT)  \
1945                      | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL)  \
1946                      | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)    \
1947                      | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
1948
1949 // invoke the object reference callback to report a reference
1950 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
1951                                                                        oop referrer,
1952                                                                        oop obj,
1953                                                                        jint index)
1954 {
1955   // field index is only valid field in reference_info
1956   static jvmtiHeapReferenceInfo reference_info = { 0 };
1957
1958   AdvancedHeapWalkContext* context = advanced_context();
1959
1960   // check that callback is provided
1961   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
1962   if (cb == NULL) {
1963     return check_for_visit(obj);
1964   }
1965
1966   // apply class filter
1967   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1968     return check_for_visit(obj);
1969   }
1970
1971   // setup the callback wrapper
1972   TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
1973
1974   // apply tag filter
1975   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1976                                  wrapper.klass_tag(),
1977                                  context->heap_filter())) {
1978     return check_for_visit(obj);
1979   }
1980
1981   // field index is only valid field in reference_info
1982   reference_info.field.index = index;
1983
1984   // for arrays we need the length, otherwise -1
1985   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
1986
1987   // invoke the callback
1988   int res = (*cb)(ref_kind,
1989                   (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
1990                   wrapper.klass_tag(),
1991                   wrapper.referrer_klass_tag(),
1992                   wrapper.obj_size(),
1993                   wrapper.obj_tag_p(),
1994                   wrapper.referrer_tag_p(),
1995                   len,
1996                   (void*)user_data());
1997
1998   if (res & JVMTI_VISIT_ABORT) {
1999     return false;
2000   }
2001   if (res & JVMTI_VISIT_OBJECTS) {
2002     check_for_visit(obj);
2003   }
2004   return true;
2005 }
2006
2007 // report a "simple root"
2008 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
2009   assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
2010          kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
2011
2012   if (is_basic_heap_walk()) {
2013     // map to old style root kind
2014     jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
2015     return invoke_basic_heap_root_callback(root_kind, obj);
2016   } else {
2017     assert(is_advanced_heap_walk(), "wrong heap walk type");
2018     return invoke_advanced_heap_root_callback(kind, obj);
2019   }
2020 }
2021
2022
2023 // invoke the array primitive value callback
2024 inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
2025   assert(obj->is_typeArray(), "not a primitive array");
2026
2027   AdvancedHeapWalkContext* context = advanced_context();
2028   assert(context->array_primitive_value_callback() != NULL, "no callback");
2029
2030   // apply class filter
2031   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2032     return true;
2033   }
2034
2035   CallbackWrapper wrapper(tag_map(), obj);
2036
2037   // apply tag filter
2038   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2039                                  wrapper.klass_tag(),
2040                                  context->heap_filter())) {
2041     return true;
2042   }
2043
2044   // invoke the callback
2045   int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
2046                                                   &wrapper,
2047                                                   obj,
2048                                                   (void*)user_data());
2049   return (!(res & JVMTI_VISIT_ABORT));
2050 }
2051
2052 // invoke the string value callback
2053 inline bool CallbackInvoker::report_string_value(oop str) {
2054   assert(str->klass() == vmClasses::String_klass(), "not a string");
2055
2056   AdvancedHeapWalkContext* context = advanced_context();
2057   assert(context->string_primitive_value_callback() != NULL, "no callback");
2058
2059   // apply class filter
2060   if (is_filtered_by_klass_filter(str, context->klass_filter())) {
2061     return true;
2062   }
2063
2064   CallbackWrapper wrapper(tag_map(), str);
2065
2066   // apply tag filter
2067   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2068                                  wrapper.klass_tag(),
2069                                  context->heap_filter())) {
2070     return true;
2071   }
2072
2073   // invoke the callback
2074   int res = invoke_string_value_callback(context->string_primitive_value_callback(),
2075                                          &wrapper,
2076                                          str,
2077                                          (void*)user_data());
2078   return (!(res & JVMTI_VISIT_ABORT));
2079 }
2080
2081 // invoke the primitive field callback
2082 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
2083                                                     oop obj,
2084                                                     jint index,
2085                                                     address addr,
2086                                                     char type)
2087 {
2088   // for primitive fields only the index will be set
2089   static jvmtiHeapReferenceInfo reference_info = { 0 };
2090
2091   AdvancedHeapWalkContext* context = advanced_context();
2092   assert(context->primitive_field_callback() != NULL, "no callback");
2093
2094   // apply class filter
2095   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2096     return true;
2097   }
2098
2099   CallbackWrapper wrapper(tag_map(), obj);
2100
2101   // apply tag filter
2102   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2103                                  wrapper.klass_tag(),
2104                                  context->heap_filter())) {
2105
return true; 2106 } 2107 2108 // the field index in the referrer 2109 reference_info.field.index = index; 2110 2111 // map the type 2112 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type; 2113 2114 // setup the jvalue 2115 jvalue value; 2116 copy_to_jvalue(&value, addr, value_type); 2117 2118 jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback(); 2119 int res = (*cb)(ref_kind, 2120 &reference_info, 2121 wrapper.klass_tag(), 2122 wrapper.obj_tag_p(), 2123 value, 2124 value_type, 2125 (void*)user_data()); 2126 return (!(res & JVMTI_VISIT_ABORT)); 2127 } 2128 2129 2130 // instance field 2131 inline bool CallbackInvoker::report_primitive_instance_field(oop obj, 2132 jint index, 2133 address value, 2134 char type) { 2135 return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD, 2136 obj, 2137 index, 2138 value, 2139 type); 2140 } 2141 2142 // static field 2143 inline bool CallbackInvoker::report_primitive_static_field(oop obj, 2144 jint index, 2145 address value, 2146 char type) { 2147 return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD, 2148 obj, 2149 index, 2150 value, 2151 type); 2152 } 2153 2154 // report a JNI local (root object) to the profiler 2155 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) { 2156 if (is_basic_heap_walk()) { 2157 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL, 2158 thread_tag, 2159 depth, 2160 m, 2161 -1, 2162 obj); 2163 } else { 2164 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL, 2165 thread_tag, tid, 2166 depth, 2167 m, 2168 (jlocation)-1, 2169 -1, 2170 obj); 2171 } 2172 } 2173 2174 2175 // report a local (stack reference, root object) 2176 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag, 2177 jlong tid, 2178 jint depth, 2179 jmethodID method, 2180 jlocation bci, 2181 jint slot, 2182 oop obj) { 2183 if (is_basic_heap_walk()) { 2184 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL, 2185 thread_tag, 2186 depth, 2187 method, 2188 slot, 2189 obj); 2190 } else { 2191 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL, 2192 thread_tag, 2193 tid, 2194 depth, 2195 method, 2196 bci, 2197 slot, 2198 obj); 2199 } 2200 } 2201 2202 // report an object referencing a class. 2203 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) { 2204 if (is_basic_heap_walk()) { 2205 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1); 2206 } else { 2207 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1); 2208 } 2209 } 2210 2211 // report a class referencing its class loader. 2212 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) { 2213 if (is_basic_heap_walk()) { 2214 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1); 2215 } else { 2216 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1); 2217 } 2218 } 2219 2220 // report a class referencing its signers. 
2221 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2222   if (is_basic_heap_walk()) {
2223     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2224   } else {
2225     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2226   }
2227 }
2228
2229 // report a class referencing its protection domain.
2230 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2231   if (is_basic_heap_walk()) {
2232     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2233   } else {
2234     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2235   }
2236 }
2237
2238 // report a class referencing its superclass.
2239 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2240   if (is_basic_heap_walk()) {
2241     // Send this to be consistent with past implementation
2242     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2243   } else {
2244     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2245   }
2246 }
2247
2248 // report a class referencing one of its interfaces.
2249 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2250   if (is_basic_heap_walk()) {
2251     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2252   } else {
2253     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2254   }
2255 }
2256
2257 // report a class referencing one of its static fields.
2258 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2259   if (is_basic_heap_walk()) {
2260     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2261   } else {
2262     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2263   }
2264 }
2265
2266 // report an array referencing an element object
2267 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2268   if (is_basic_heap_walk()) {
2269     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2270   } else {
2271     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2272   }
2273 }
2274
2275 // report an object referencing an instance field object
2276 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2277   if (is_basic_heap_walk()) {
2278     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2279   } else {
2280     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2281   }
2282 }
2283
2284 // report a class referencing one of its constant pool entries
2285 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2286   if (is_basic_heap_walk()) {
2287     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2288   } else {
2289     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2290   }
2291 }
2292
2293 // A supporting closure used to process simple roots
2294 class SimpleRootsClosure : public OopClosure {
2295  private:
2296   jvmtiHeapReferenceKind _kind;
2297   bool _continue;
2298
2299   jvmtiHeapReferenceKind root_kind() { return _kind; }
2300
2301  public:
2302   void set_kind(jvmtiHeapReferenceKind kind) {
2303     _kind = kind;
2304     _continue = true;
2305   }
2306
2307   inline bool stopped() {
2308     return !_continue;
2309   }
2310
2311   void do_oop(oop* obj_p) {
2312     // iteration has terminated
2313     if (stopped()) {
2314       return;
2315     }
2316
2317     oop o = NativeAccess<AS_NO_KEEPALIVE>::oop_load(obj_p);
2318     // ignore null
2319     if (o == NULL) {
2320       return;
2321     }
2322
2323     assert(Universe::heap()->is_in(o), "should be impossible");
2324
2325     jvmtiHeapReferenceKind kind = root_kind();
2326
2327     // invoke the callback
2328     _continue = CallbackInvoker::report_simple_root(kind, o);
2329
2330   }
2331   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2332 };
2333
2334 // A supporting closure used to process JNI locals
2335 class JNILocalRootsClosure : public OopClosure {
2336  private:
2337   jlong _thread_tag;
2338   jlong _tid;
2339   jint _depth;
2340   jmethodID _method;
2341   bool _continue;
2342  public:
2343   void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
2344     _thread_tag = thread_tag;
2345     _tid = tid;
2346     _depth = depth;
2347     _method = method;
2348     _continue = true;
2349   }
2350
2351   inline bool stopped() {
2352     return !_continue;
2353   }
2354
2355   void do_oop(oop* obj_p) {
2356     // iteration has terminated
2357     if (stopped()) {
2358       return;
2359     }
2360
2361     oop o = *obj_p;
2362     // ignore null
2363     if (o == NULL) {
2364       return;
2365     }
2366
2367     // invoke the callback
2368     _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method,
                                                        o);
2369   }
2370   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2371 };
2372
2373
2374 // A VM operation to iterate over objects that are reachable from
2375 // a set of roots or an initial object.
2376 //
2377 // For VM_HeapWalkOperation the set of roots used is:
2378 //
2379 // - All JNI global references
2380 // - All inflated monitors
2381 // - All classes loaded by the boot class loader (or all classes
2382 //     in the event that class unloading is disabled)
2383 // - All java threads
2384 // - For each java thread then all locals and JNI local references
2385 //      on the thread's execution stack
2386 // - All visible/explainable objects from Universe::oops_do
2387 //
2388 class VM_HeapWalkOperation: public VM_Operation {
2389  private:
2390   enum {
2391     initial_visit_stack_size = 4000
2392   };
2393
2394   bool _is_advanced_heap_walk;                      // indicates FollowReferences
2395   JvmtiTagMap* _tag_map;
2396   Handle _initial_object;
2397   GrowableArray<oop>* _visit_stack;                 // the visit stack
2398
2399   // Dead object tags in JvmtiTagMap
2400   GrowableArray<jlong>* _dead_objects;
2401
2402   bool _following_object_refs;                      // are we following object references
2403
2404   bool _reporting_primitive_fields;                 // optional reporting
2405   bool _reporting_primitive_array_values;
2406   bool _reporting_string_values;
2407
2408   GrowableArray<oop>* create_visit_stack() {
2409     return new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<oop>(initial_visit_stack_size, mtServiceability);
2410   }
2411
2412   // accessors
2413   bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
2414   JvmtiTagMap* tag_map() const                     { return _tag_map; }
2415   Handle initial_object() const                    { return _initial_object; }
2416
2417   bool is_following_references() const             { return _following_object_refs; }
2418
2419   bool is_reporting_primitive_fields() const       { return _reporting_primitive_fields; }
2420   bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
2421   bool is_reporting_string_values() const          { return _reporting_string_values; }
2422
2423   GrowableArray<oop>* visit_stack() const          { return _visit_stack; }
2424
2425   // iterate over the various object types
2426   inline bool iterate_over_array(oop o);
2427   inline bool iterate_over_type_array(oop o);
2428   inline bool iterate_over_class(oop o);
2429   inline bool iterate_over_object(oop o);
2430
2431   // root collection
2432   inline bool collect_simple_roots();
2433   inline bool collect_stack_roots();
2434   inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);
2435
2436   // visit an object
2437   inline bool visit(oop o);
2438
2439  public:
2440   VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2441                        Handle initial_object,
2442                        BasicHeapWalkContext callbacks,
2443                        const void* user_data,
2444                        GrowableArray<jlong>* objects);
2445
2446   VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2447                        Handle initial_object,
2448                        AdvancedHeapWalkContext callbacks,
2449                        const void* user_data,
2450                        GrowableArray<jlong>* objects);
2451
2452   ~VM_HeapWalkOperation();
2453
2454   VMOp_Type type() const { return VMOp_HeapWalkOperation; }
2455   void doit();
2456 };
2457
2458
2459 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2460                                            Handle initial_object,
2461                                            BasicHeapWalkContext callbacks,
2462                                            const void* user_data,
2463                                            GrowableArray<jlong>* objects) {
2464   _is_advanced_heap_walk = false;
2465   _tag_map = tag_map;
2466   _initial_object = initial_object;
2467   _following_object_refs = (callbacks.object_ref_callback() != NULL);
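  // the deprecated basic walk has no primitive field, primitive array value
  // or string value callbacks, so those reporting modes are always off here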
2468   _reporting_primitive_fields = false;
2469   _reporting_primitive_array_values = false;
2470   _reporting_string_values = false;
2471   _visit_stack = create_visit_stack();
2472   _dead_objects = objects;
2473
2474
2475   CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2476 }
2477
2478 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2479                                            Handle initial_object,
2480                                            AdvancedHeapWalkContext callbacks,
2481                                            const void* user_data,
2482                                            GrowableArray<jlong>* objects) {
2483   _is_advanced_heap_walk = true;
2484   _tag_map = tag_map;
2485   _initial_object = initial_object;
2486   _following_object_refs = true;
2487   _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);
2488   _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);
2489   _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);
2490   _visit_stack = create_visit_stack();
2491   _dead_objects = objects;
2492
2493   CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2494 }
2495
2496 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2497   if (_following_object_refs) {
2498     assert(_visit_stack != NULL, "checking");
2499     delete _visit_stack;
2500     _visit_stack = NULL;
2501   }
2502 }
2503
2504 // an array references its class and has a reference to
2505 // each element in the array
2506 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
2507   objArrayOop array = objArrayOop(o);
2508
2509   // array reference to its class
2510   oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
2511   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2512     return false;
2513   }
2514
2515   // iterate over the array and report each reference to a
2516   // non-null element
2517   for (int index=0; index<array->length(); index++) {
2518     oop elem = array->obj_at(index);
2519     if (elem == NULL) {
2520       continue;
2521     }
2522
2523     // report the array reference o[index] = elem
2524     if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2525       return false;
2526     }
2527   }
2528   return true;
2529 }
2530
2531 // a type array references its class
2532 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
2533   Klass* k = o->klass();
2534   oop mirror = k->java_mirror();
2535   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2536     return false;
2537   }
2538
2539   // report the array contents if required
2540   if (is_reporting_primitive_array_values()) {
2541     if (!CallbackInvoker::report_primitive_array_values(o)) {
2542       return false;
2543     }
2544   }
2545   return true;
2546 }
2547
2548 #ifdef ASSERT
2549 // verify that a static oop field is in range
2550 static inline bool verify_static_oop(InstanceKlass* ik,
2551                                      oop mirror, int offset) {
2552   address obj_p = cast_from_oop<address>(mirror) + offset;
2553   address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
2554   address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
2555   assert(end >= start, "sanity check");
2556
2557   if (obj_p >= start && obj_p < end) {
2558     return true;
2559   } else {
2560     return false;
2561   }
2562 }
2563 #endif // #ifdef ASSERT
2564
2565 // a class references its super class, interfaces, class loader, ...
2566 // and finally its static fields
2567 inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
2568   int i;
2569   Klass* klass = java_lang_Class::as_Klass(java_class);
2570
2571   if (klass->is_instance_klass()) {
2572     InstanceKlass* ik = InstanceKlass::cast(klass);
2573
2574     // Ignore the class if it hasn't been linked yet
2575     if (!ik->is_linked()) {
2576       return true;
2577     }
2578
2579     // get the java mirror
2580     oop mirror = klass->java_mirror();
2581
2582     // super (only if something more interesting than java.lang.Object)
2583     InstanceKlass* java_super = ik->java_super();
2584     if (java_super != NULL && java_super != vmClasses::Object_klass()) {
2585       oop super = java_super->java_mirror();
2586       if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
2587         return false;
2588       }
2589     }
2590
2591     // class loader
2592     oop cl = ik->class_loader();
2593     if (cl != NULL) {
2594       if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
2595         return false;
2596       }
2597     }
2598
2599     // protection domain
2600     oop pd = ik->protection_domain();
2601     if (pd != NULL) {
2602       if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
2603         return false;
2604       }
2605     }
2606
2607     // signers
2608     oop signers = ik->signers();
2609     if (signers != NULL) {
2610       if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
2611         return false;
2612       }
2613     }
2614
2615     // references from the constant pool
2616     {
2617       ConstantPool* pool = ik->constants();
2618       for (int i = 1; i < pool->length(); i++) {
2619         constantTag tag = pool->tag_at(i).value();
2620         if (tag.is_string() || tag.is_klass() || tag.is_unresolved_klass()) {
2621           oop entry;
2622           if (tag.is_string()) {
2623             entry = pool->resolved_string_at(i);
2624             // If the entry is non-null it is resolved.
2625             if (entry == NULL) {
2626               continue;
2627             }
2628           } else if (tag.is_klass()) {
2629             entry = pool->resolved_klass_at(i)->java_mirror();
2630           } else {
2631             // Code generated by JIT compilers might not resolve constant
2632             // pool entries. Treat them as resolved if they are loaded.
2633             assert(tag.is_unresolved_klass(), "must be");
2634             constantPoolHandle cp(Thread::current(), pool);
2635             Klass* klass = ConstantPool::klass_at_if_loaded(cp, i);
2636             if (klass == NULL) {
2637               continue;
2638             }
2639             entry = klass->java_mirror();
2640           }
2641           if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
2642             return false;
2643           }
2644         }
2645       }
2646     }
2647
2648     // interfaces
2649     // (These will already have been reported as references from the constant pool
2650     //  but are specified by IterateOverReachableObjects and must be reported).
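    // (FollowReferences reports each of them too, as a
    //  JVMTI_HEAP_REFERENCE_INTERFACE reference.)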
2651 Array<InstanceKlass*>* interfaces = ik->local_interfaces(); 2652 for (i = 0; i < interfaces->length(); i++) { 2653 oop interf = interfaces->at(i)->java_mirror(); 2654 if (interf == NULL) { 2655 continue; 2656 } 2657 if (!CallbackInvoker::report_interface_reference(mirror, interf)) { 2658 return false; 2659 } 2660 } 2661 2662 // iterate over the static fields 2663 2664 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass); 2665 for (i=0; i<field_map->field_count(); i++) { 2666 ClassFieldDescriptor* field = field_map->field_at(i); 2667 char type = field->field_type(); 2668 if (!is_primitive_field_type(type)) { 2669 oop fld_o = mirror->obj_field(field->field_offset()); 2670 assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check"); 2671 if (fld_o != NULL) { 2672 int slot = field->field_index(); 2673 if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) { 2674 delete field_map; 2675 return false; 2676 } 2677 } 2678 } else { 2679 if (is_reporting_primitive_fields()) { 2680 address addr = cast_from_oop<address>(mirror) + field->field_offset(); 2681 int slot = field->field_index(); 2682 if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) { 2683 delete field_map; 2684 return false; 2685 } 2686 } 2687 } 2688 } 2689 delete field_map; 2690 2691 return true; 2692 } 2693 2694 return true; 2695 } 2696 2697 // an object references a class and its instance fields 2698 // (static fields are ignored here as we report these as 2699 // references from the class). 2700 inline bool VM_HeapWalkOperation::iterate_over_object(oop o) { 2701 // reference to the class 2702 if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) { 2703 return false; 2704 } 2705 2706 // iterate over instance fields 2707 ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o); 2708 for (int i=0; i<field_map->field_count(); i++) { 2709 ClassFieldDescriptor* field = field_map->field_at(i); 2710 char type = field->field_type(); 2711 if (!is_primitive_field_type(type)) { 2712 oop fld_o = o->obj_field_access<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>(field->field_offset()); 2713 // ignore any objects that aren't visible to profiler 2714 if (fld_o != NULL) { 2715 assert(Universe::heap()->is_in(fld_o), "unsafe code should not " 2716 "have references to Klass* anymore"); 2717 int slot = field->field_index(); 2718 if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) { 2719 return false; 2720 } 2721 } 2722 } else { 2723 if (is_reporting_primitive_fields()) { 2724 // primitive instance field 2725 address addr = cast_from_oop<address>(o) + field->field_offset(); 2726 int slot = field->field_index(); 2727 if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) { 2728 return false; 2729 } 2730 } 2731 } 2732 } 2733 2734 // if the object is a java.lang.String 2735 if (is_reporting_string_values() && 2736 o->klass() == vmClasses::String_klass()) { 2737 if (!CallbackInvoker::report_string_value(o)) { 2738 return false; 2739 } 2740 } 2741 return true; 2742 } 2743 2744 2745 // Collects all simple (non-stack) roots except for threads; 2746 // threads are handled in collect_stack_roots() as an optimization. 2747 // if there's a heap root callback provided then the callback is 2748 // invoked for each simple root. 
2749 // if an object reference callback is provided then all simple 2750 // roots are pushed onto the marking stack so that they can be 2751 // processed later 2752 // 2753 inline bool VM_HeapWalkOperation::collect_simple_roots() { 2754 SimpleRootsClosure blk; 2755 2756 // JNI globals 2757 blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL); 2758 JNIHandles::oops_do(&blk); 2759 if (blk.stopped()) { 2760 return false; 2761 } 2762 2763 // Preloaded classes and loader from the system dictionary 2764 blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS); 2765 CLDToOopClosure cld_closure(&blk, false); 2766 ClassLoaderDataGraph::always_strong_cld_do(&cld_closure); 2767 if (blk.stopped()) { 2768 return false; 2769 } 2770 2771 // threads are now handled in collect_stack_roots() 2772 2773 // Other kinds of roots maintained by HotSpot 2774 // Many of these won't be visible but others (such as instances of important 2775 // exceptions) will be visible. 2776 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER); 2777 Universe::vm_global()->oops_do(&blk); 2778 if (blk.stopped()) { 2779 return false; 2780 } 2781 2782 return true; 2783 } 2784 2785 // Walk the stack of a given thread and find all references (locals 2786 // and JNI calls) and report these as stack references 2787 inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread, 2788 JNILocalRootsClosure* blk) 2789 { 2790 oop threadObj = java_thread->threadObj(); 2791 assert(threadObj != NULL, "sanity check"); 2792 2793 // only need to get the thread's tag once per thread 2794 jlong thread_tag = tag_for(_tag_map, threadObj); 2795 2796 // also need the thread id 2797 jlong tid = java_lang_Thread::thread_id(threadObj); 2798 2799 2800 if (java_thread->has_last_Java_frame()) { 2801 2802 // vframes are resource allocated 2803 Thread* current_thread = Thread::current(); 2804 ResourceMark rm(current_thread); 2805 HandleMark hm(current_thread); 2806 2807 RegisterMap reg_map(java_thread); 2808 frame f = java_thread->last_frame(); 2809 vframe* vf = vframe::new_vframe(&f, ®_map, java_thread); 2810 2811 bool is_top_frame = true; 2812 int depth = 0; 2813 frame* last_entry_frame = NULL; 2814 2815 while (vf != NULL) { 2816 if (vf->is_java_frame()) { 2817 2818 // java frame (interpreted, compiled, ...) 
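        // For each Java frame the T_OBJECT locals and expression stack slots
        // are reported below as JVMTI_HEAP_REFERENCE_STACK_LOCAL roots, and
        // any oops embedded in a compiled frame's nmethod are reported as
        // JNI-local style roots through the JNILocalRootsClosure.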
2819 javaVFrame *jvf = javaVFrame::cast(vf); 2820 2821 // the jmethodID 2822 jmethodID method = jvf->method()->jmethod_id(); 2823 2824 if (!(jvf->method()->is_native())) { 2825 jlocation bci = (jlocation)jvf->bci(); 2826 StackValueCollection* locals = jvf->locals(); 2827 for (int slot=0; slot<locals->size(); slot++) { 2828 if (locals->at(slot)->type() == T_OBJECT) { 2829 oop o = locals->obj_at(slot)(); 2830 if (o == NULL) { 2831 continue; 2832 } 2833 2834 // stack reference 2835 if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method, 2836 bci, slot, o)) { 2837 return false; 2838 } 2839 } 2840 } 2841 2842 StackValueCollection* exprs = jvf->expressions(); 2843 for (int index=0; index < exprs->size(); index++) { 2844 if (exprs->at(index)->type() == T_OBJECT) { 2845 oop o = exprs->obj_at(index)(); 2846 if (o == NULL) { 2847 continue; 2848 } 2849 2850 // stack reference 2851 if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method, 2852 bci, locals->size() + index, o)) { 2853 return false; 2854 } 2855 } 2856 } 2857 2858 // Follow oops from compiled nmethod 2859 if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) { 2860 blk->set_context(thread_tag, tid, depth, method); 2861 jvf->cb()->as_nmethod()->oops_do(blk); 2862 } 2863 } else { 2864 blk->set_context(thread_tag, tid, depth, method); 2865 if (is_top_frame) { 2866 // JNI locals for the top frame. 2867 java_thread->active_handles()->oops_do(blk); 2868 } else { 2869 if (last_entry_frame != NULL) { 2870 // JNI locals for the entry frame 2871 assert(last_entry_frame->is_entry_frame(), "checking"); 2872 last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk); 2873 } 2874 } 2875 } 2876 last_entry_frame = NULL; 2877 depth++; 2878 } else { 2879 // externalVFrame - for an entry frame then we report the JNI locals 2880 // when we find the corresponding javaVFrame 2881 frame* fr = vf->frame_pointer(); 2882 assert(fr != NULL, "sanity check"); 2883 if (fr->is_entry_frame()) { 2884 last_entry_frame = fr; 2885 } 2886 } 2887 2888 vf = vf->sender(); 2889 is_top_frame = false; 2890 } 2891 } else { 2892 // no last java frame but there may be JNI locals 2893 blk->set_context(thread_tag, tid, 0, (jmethodID)NULL); 2894 java_thread->active_handles()->oops_do(blk); 2895 } 2896 return true; 2897 } 2898 2899 2900 // Collects the simple roots for all threads and collects all 2901 // stack roots - for each thread it walks the execution 2902 // stack to find all references and local JNI refs. 2903 inline bool VM_HeapWalkOperation::collect_stack_roots() { 2904 JNILocalRootsClosure blk; 2905 for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) { 2906 oop threadObj = thread->threadObj(); 2907 if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) { 2908 // Collect the simple root for this thread before we 2909 // collect its stack roots 2910 if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD, 2911 threadObj)) { 2912 return false; 2913 } 2914 if (!collect_stack_roots(thread, &blk)) { 2915 return false; 2916 } 2917 } 2918 } 2919 return true; 2920 } 2921 2922 // visit an object 2923 // first mark the object as visited 2924 // second get all the outbound references from this object (in other words, all 2925 // the objects referenced by this object). 
2926 // 2927 bool VM_HeapWalkOperation::visit(oop o) { 2928 // mark object as visited 2929 assert(!ObjectMarker::visited(o), "can't visit same object more than once"); 2930 ObjectMarker::mark(o); 2931 2932 // instance 2933 if (o->is_instance()) { 2934 if (o->klass() == vmClasses::Class_klass()) { 2935 if (!java_lang_Class::is_primitive(o)) { 2936 // a java.lang.Class 2937 return iterate_over_class(o); 2938 } 2939 } else { 2940 return iterate_over_object(o); 2941 } 2942 } 2943 2944 // object array 2945 if (o->is_objArray()) { 2946 return iterate_over_array(o); 2947 } 2948 2949 // type array 2950 if (o->is_typeArray()) { 2951 return iterate_over_type_array(o); 2952 } 2953 2954 return true; 2955 } 2956 2957 void VM_HeapWalkOperation::doit() { 2958 ResourceMark rm; 2959 ObjectMarkerController marker; 2960 ClassFieldMapCacheMark cm; 2961 2962 JvmtiTagMap::check_hashmaps_for_heapwalk(_dead_objects); 2963 2964 assert(visit_stack()->is_empty(), "visit stack must be empty"); 2965 2966 // the heap walk starts with an initial object or the heap roots 2967 if (initial_object().is_null()) { 2968 // If either collect_stack_roots() or collect_simple_roots() 2969 // returns false at this point, then there are no mark bits 2970 // to reset. 2971 ObjectMarker::set_needs_reset(false); 2972 2973 // Calling collect_stack_roots() before collect_simple_roots() 2974 // can result in a big performance boost for an agent that is 2975 // focused on analyzing references in the thread stacks. 2976 if (!collect_stack_roots()) return; 2977 2978 if (!collect_simple_roots()) return; 2979 2980 // no early return so enable heap traversal to reset the mark bits 2981 ObjectMarker::set_needs_reset(true); 2982 } else { 2983 visit_stack()->push(initial_object()()); 2984 } 2985 2986 // object references required 2987 if (is_following_references()) { 2988 2989 // visit each object until all reachable objects have been 2990 // visited or the callback asked to terminate the iteration. 
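    // The traversal is depth-first: visit(o) reports o's outbound references
    // and check_for_visit() pushes each not-yet-visited referee onto the
    // visit stack.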
2991 while (!visit_stack()->is_empty()) { 2992 oop o = visit_stack()->pop(); 2993 if (!ObjectMarker::visited(o)) { 2994 if (!visit(o)) { 2995 break; 2996 } 2997 } 2998 } 2999 } 3000 } 3001 3002 // iterate over all objects that are reachable from a set of roots 3003 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback, 3004 jvmtiStackReferenceCallback stack_ref_callback, 3005 jvmtiObjectReferenceCallback object_ref_callback, 3006 const void* user_data) { 3007 JavaThread* jt = JavaThread::current(); 3008 EscapeBarrier eb(true, jt); 3009 eb.deoptimize_objects_all_threads(); 3010 Arena dead_object_arena(mtServiceability); 3011 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0); 3012 { 3013 MutexLocker ml(Heap_lock); 3014 BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback); 3015 VM_HeapWalkOperation op(this, Handle(), context, user_data, &dead_objects); 3016 VMThread::execute(&op); 3017 } 3018 // Post events outside of Heap_lock 3019 post_dead_objects(&dead_objects); 3020 } 3021 3022 // iterate over all objects that are reachable from a given object 3023 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object, 3024 jvmtiObjectReferenceCallback object_ref_callback, 3025 const void* user_data) { 3026 oop obj = JNIHandles::resolve(object); 3027 Handle initial_object(Thread::current(), obj); 3028 3029 Arena dead_object_arena(mtServiceability); 3030 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0); 3031 { 3032 MutexLocker ml(Heap_lock); 3033 BasicHeapWalkContext context(NULL, NULL, object_ref_callback); 3034 VM_HeapWalkOperation op(this, initial_object, context, user_data, &dead_objects); 3035 VMThread::execute(&op); 3036 } 3037 // Post events outside of Heap_lock 3038 post_dead_objects(&dead_objects); 3039 } 3040 3041 // follow references from an initial object or the GC roots 3042 void JvmtiTagMap::follow_references(jint heap_filter, 3043 Klass* klass, 3044 jobject object, 3045 const jvmtiHeapCallbacks* callbacks, 3046 const void* user_data) 3047 { 3048 oop obj = JNIHandles::resolve(object); 3049 JavaThread* jt = JavaThread::current(); 3050 Handle initial_object(jt, obj); 3051 // EA based optimizations that are tagged or reachable from initial_object are already reverted. 3052 EscapeBarrier eb(initial_object.is_null() && 3053 !(heap_filter & JVMTI_HEAP_FILTER_UNTAGGED), 3054 jt); 3055 eb.deoptimize_objects_all_threads(); 3056 3057 Arena dead_object_arena(mtServiceability); 3058 GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0); 3059 { 3060 MutexLocker ml(Heap_lock); 3061 AdvancedHeapWalkContext context(heap_filter, klass, callbacks); 3062 VM_HeapWalkOperation op(this, initial_object, context, user_data, &dead_objects); 3063 VMThread::execute(&op); 3064 } 3065 // Post events outside of Heap_lock 3066 post_dead_objects(&dead_objects); 3067 } 3068 3069 // Concurrent GC needs to call this in relocation pause, so after the objects are moved 3070 // and have their new addresses, the table can be rehashed. 
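// (Entries are hashed by object address, so a moved object would otherwise be
// looked up in the wrong bucket.)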
3071 void JvmtiTagMap::set_needs_rehashing() { 3072 assert(SafepointSynchronize::is_at_safepoint(), "called in gc pause"); 3073 assert(Thread::current()->is_VM_thread(), "should be the VM thread"); 3074 3075 JvmtiEnvIterator it; 3076 for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) { 3077 JvmtiTagMap* tag_map = env->tag_map_acquire(); 3078 if (tag_map != NULL) { 3079 tag_map->_needs_rehashing = true; 3080 } 3081 } 3082 } 3083 3084 // Verify gc_notification follows set_needs_cleaning. 3085 DEBUG_ONLY(static bool notified_needs_cleaning = false;) 3086 3087 void JvmtiTagMap::set_needs_cleaning() { 3088 assert(SafepointSynchronize::is_at_safepoint(), "called in gc pause"); 3089 assert(Thread::current()->is_VM_thread(), "should be the VM thread"); 3090 // Can't assert !notified_needs_cleaning; a partial GC might be upgraded 3091 // to a full GC and do this twice without intervening gc_notification. 3092 DEBUG_ONLY(notified_needs_cleaning = true;) 3093 3094 JvmtiEnvIterator it; 3095 for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) { 3096 JvmtiTagMap* tag_map = env->tag_map_acquire(); 3097 if (tag_map != NULL) { 3098 tag_map->_needs_cleaning = !tag_map->is_empty(); 3099 } 3100 } 3101 } 3102 3103 void JvmtiTagMap::gc_notification(size_t num_dead_entries) { 3104 assert(notified_needs_cleaning, "missing GC notification"); 3105 DEBUG_ONLY(notified_needs_cleaning = false;) 3106 3107 // Notify ServiceThread if there's work to do. 3108 { 3109 MonitorLocker ml(Service_lock, Mutex::_no_safepoint_check_flag); 3110 _has_object_free_events = (num_dead_entries != 0); 3111 if (_has_object_free_events) ml.notify_all(); 3112 } 3113 3114 // If no dead entries then cancel cleaning requests. 3115 if (num_dead_entries == 0) { 3116 JvmtiEnvIterator it; 3117 for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) { 3118 JvmtiTagMap* tag_map = env->tag_map_acquire(); 3119 if (tag_map != NULL) { 3120 MutexLocker ml (tag_map->lock(), Mutex::_no_safepoint_check_flag); 3121 tag_map->_needs_cleaning = false; 3122 } 3123 } 3124 } 3125 } 3126 3127 // Used by ServiceThread to discover there is work to do. 3128 bool JvmtiTagMap::has_object_free_events_and_reset() { 3129 assert_lock_strong(Service_lock); 3130 bool result = _has_object_free_events; 3131 _has_object_free_events = false; 3132 return result; 3133 } 3134 3135 // Used by ServiceThread to clean up tagmaps. 3136 void JvmtiTagMap::flush_all_object_free_events() { 3137 JavaThread* thread = JavaThread::current(); 3138 JvmtiEnvIterator it; 3139 for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) { 3140 JvmtiTagMap* tag_map = env->tag_map_acquire(); 3141 if (tag_map != NULL) { 3142 tag_map->flush_object_free_events(); 3143 ThreadBlockInVM tbiv(thread); // Be safepoint-polite while looping. 3144 } 3145 } 3146 }
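
// An illustrative sketch of the agent-side view of the advanced heap walk
// implemented above (hypothetical agent code, not part of this file; the
// names prefixed with "my_" are invented). FollowReferences() enters
// JvmtiTagMap::follow_references(), which runs a VM_HeapWalkOperation and
// funnels every discovered edge through CallbackInvoker's advanced-mode
// functions into the agent's jvmtiHeapReferenceCallback:
//
//   #include <jvmti.h>
//
//   static jint JNICALL my_heap_ref_cb(jvmtiHeapReferenceKind reference_kind,
//                                      const jvmtiHeapReferenceInfo* reference_info,
//                                      jlong class_tag,
//                                      jlong referrer_class_tag,
//                                      jlong size,
//                                      jlong* tag_ptr,
//                                      jlong* referrer_tag_ptr,
//                                      jint length,
//                                      void* user_data) {
//     // reference_info is non-NULL only for the kinds in REF_INFO_MASK,
//     // e.g. a field reference carries the index of the referring field.
//     if (reference_kind == JVMTI_HEAP_REFERENCE_FIELD && reference_info != NULL) {
//       jint slot = reference_info->field.index;  // (use as needed)
//     }
//     *tag_ptr = 1;                // tag the object; the CallbackWrapper
//                                  // destructor applies it after we return
//     return JVMTI_VISIT_OBJECTS;  // keep following references through this
//                                  // object; JVMTI_VISIT_ABORT stops the walk
//   }
//
//   static void my_walk_from_roots(jvmtiEnv* jvmti) {
//     jvmtiHeapCallbacks callbacks = {};  // unset callbacks remain NULL
//     callbacks.heap_reference_callback = my_heap_ref_cb;
//     // heap_filter 0: no tag filtering; NULL klass: no class filter;
//     // NULL initial_object: start from the heap roots.
//     jvmti->FollowReferences(0, NULL, NULL, &callbacks, NULL);
//   }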