1 /*
   2  * Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/classLoaderDataGraph.hpp"
  27 #include "classfile/javaClasses.inline.hpp"
  28 #include "classfile/symbolTable.hpp"
  29 #include "classfile/vmClasses.hpp"
  30 #include "classfile/vmSymbols.hpp"
  31 #include "gc/shared/collectedHeap.hpp"
  32 #include "jvmtifiles/jvmtiEnv.hpp"
  33 #include "logging/log.hpp"
  34 #include "memory/allocation.inline.hpp"
  35 #include "memory/resourceArea.hpp"
  36 #include "memory/universe.hpp"
  37 #include "oops/access.inline.hpp"
  38 #include "oops/arrayOop.hpp"
  39 #include "oops/constantPool.inline.hpp"
  40 #include "oops/instanceMirrorKlass.hpp"
  41 #include "oops/klass.inline.hpp"
  42 #include "oops/objArrayKlass.hpp"
  43 #include "oops/objArrayOop.inline.hpp"
  44 #include "oops/oop.inline.hpp"
  45 #include "oops/typeArrayOop.inline.hpp"
  46 #include "prims/jvmtiEventController.hpp"
  47 #include "prims/jvmtiEventController.inline.hpp"
  48 #include "prims/jvmtiExport.hpp"
  49 #include "prims/jvmtiImpl.hpp"
  50 #include "prims/jvmtiTagMap.hpp"
  51 #include "prims/jvmtiTagMapTable.hpp"
  52 #include "runtime/deoptimization.hpp"
  53 #include "runtime/frame.inline.hpp"
  54 #include "runtime/handles.inline.hpp"
  55 #include "runtime/interfaceSupport.inline.hpp"
  56 #include "runtime/javaCalls.hpp"
  57 #include "runtime/jniHandles.inline.hpp"
  58 #include "runtime/mutex.hpp"
  59 #include "runtime/mutexLocker.hpp"
  60 #include "runtime/reflectionUtils.hpp"
  61 #include "runtime/safepoint.hpp"
  62 #include "runtime/timerTrace.hpp"
  63 #include "runtime/thread.inline.hpp"
  64 #include "runtime/threadSMR.hpp"
  65 #include "runtime/vframe.hpp"
  66 #include "runtime/vmThread.hpp"
  67 #include "runtime/vmOperations.hpp"
  68 #include "utilities/macros.hpp"
  69 
// True once any JVMTI environment has requested ObjectFree events
// (presumably set where the event is enabled -- not visible in this chunk).
bool JvmtiTagMap::_has_object_free_events = false;
  71 
  72 // create a JvmtiTagMap
JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
  _env(env),
  _lock(Mutex::nosafepoint, "JvmtiTagMap_lock"),
  _needs_rehashing(false),
  _needs_cleaning(false) {

  // Creation is serialized by JvmtiThreadState_lock (see tag_map_for()),
  // so at most one tag map can be installed per environment.
  assert(JvmtiThreadState_lock->is_locked(), "sanity check");
  assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");

  _hashmap = new JvmtiTagMapTable();

  // finally add us to the environment; the release store publishes the fully
  // constructed map to lock-free readers using tag_map_acquire()
  ((JvmtiEnvBase *)env)->release_set_tag_map(this);
}
  87 
  88 // destroy a JvmtiTagMap
JvmtiTagMap::~JvmtiTagMap() {

  // no lock acquired as we assume the enclosing environment is
  // also being destroyed.
  ((JvmtiEnvBase *)_env)->set_tag_map(NULL);

  // finally destroy the hashmap; NULL the field so any stale access
  // fails fast rather than touching freed memory
  delete _hashmap;
  _hashmap = NULL;
}
  99 
 100 // Called by env_dispose() to reclaim memory before deallocation.
 101 // Remove all the entries but keep the empty table intact.
 102 // This needs the table lock.
void JvmtiTagMap::clear() {
  // _no_safepoint_check_flag: must not safepoint while holding the tag map lock
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
  _hashmap->clear();
}
 107 
// returns the tag map for the given environment. If the tag map
// doesn't exist then it is created.
JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
  // Double-checked locking: lock-free acquire-load first, then re-check and
  // create under JvmtiThreadState_lock if the map does not exist yet.
  JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map_acquire();
  if (tag_map == NULL) {
    MutexLocker mu(JvmtiThreadState_lock);
    tag_map = ((JvmtiEnvBase*)env)->tag_map();  // re-check under the lock
    if (tag_map == NULL) {
      // constructor publishes the new map into the environment
      tag_map = new JvmtiTagMap(env);
    }
  } else {
    // The slow path above may safepoint (lock acquisition); assert the fast
    // path's caller could have tolerated one too.
    DEBUG_ONLY(JavaThread::current()->check_possible_safepoint());
  }
  return tag_map;
}
 123 
 124 // iterate over all entries in the tag map.
void JvmtiTagMap::entry_iterate(JvmtiTagMapEntryClosure* closure) {
  hashmap()->entry_iterate(closure);  // delegates straight to the table
}
 128 
 129 // returns true if the hashmaps are empty
bool JvmtiTagMap::is_empty() {
  // Safe either at a safepoint or while holding the tag map lock.
  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
  return hashmap()->is_empty();
}
 134 
// This checks for posting and rehashing before operations on
// this tagmap table.  The calls from a JavaThread only rehash, posting is
// only done before heap walks.
void JvmtiTagMap::check_hashmap(bool post_events) {
  // Posting ObjectFree events is only permitted at a safepoint (heap walks).
  assert(!post_events || SafepointSynchronize::is_at_safepoint(), "precondition");
  assert(is_locked(), "checking");

  if (is_empty()) { return; }

  // Purge entries for dead objects, posting ObjectFree events when requested
  // and the environment has the event enabled.
  if (_needs_cleaning &&
      post_events &&
      env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
    remove_dead_entries_locked(true /* post_object_free */);
  }
  // Rehash if object addresses changed (e.g. after a moving GC).
  if (_needs_rehashing) {
    log_info(jvmti, table)("TagMap table needs rehashing");
    hashmap()->rehash();
    _needs_rehashing = false;
  }
}
 155 
 156 // This checks for posting and rehashing and is called from the heap walks.
void JvmtiTagMap::check_hashmaps_for_heapwalk() {
  assert(SafepointSynchronize::is_at_safepoint(), "called from safepoints");

  // Verify that the tag map tables are valid and unconditionally post events
  // that are expected to be posted before gc_notification.
  JvmtiEnvIterator it;
  for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
    // acquire-load pairs with the release store in the JvmtiTagMap constructor
    JvmtiTagMap* tag_map = env->tag_map_acquire();
    if (tag_map != NULL) {
      // The ZDriver may be walking the hashmaps concurrently so this lock is needed.
      MutexLocker ml(tag_map->lock(), Mutex::_no_safepoint_check_flag);
      tag_map->check_hashmap(/*post_events*/ true);
    }
  }
}
 172 
 173 // Return the tag value for an object, or 0 if the object is
 174 // not tagged
 175 //
 176 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
 177   JvmtiTagMapEntry* entry = tag_map->hashmap()->find(o);
 178   if (entry == NULL) {
 179     return 0;
 180   } else {
 181     jlong tag = entry->tag();
 182     assert(tag != 0, "should not be zero");
 183     return entry->tag();
 184   }
 185 }
 186 
 187 
 188 // A CallbackWrapper is a support class for querying and tagging an object
 189 // around a callback to a profiler. The constructor does pre-callback
 190 // work to get the tag value, klass tag value, ... and the destructor
 191 // does the post-callback work of tagging or untagging the object.
 192 //
 193 // {
 194 //   CallbackWrapper wrapper(tag_map, o);
 195 //
 196 //   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
 197 //
 198 // } // wrapper goes out of scope here which results in the destructor
 199 //      checking to see if the object has been tagged, untagged, or the
 200 //      tag value has changed.
 201 //
class CallbackWrapper : public StackObj {
 private:
  JvmtiTagMap* _tag_map;       // tag map being queried/updated
  JvmtiTagMapTable* _hashmap;  // the tag map's table, captured up front
  JvmtiTagMapEntry* _entry;    // pre-callback entry for _o (NULL if untagged)
  oop _o;                      // object being reported to the agent
  jlong _obj_size;             // object size in bytes
  jlong _obj_tag;              // tag value; callback may update via obj_tag_p()
  jlong _klass_tag;            // tag of the object's class (mirror)

 protected:
  JvmtiTagMap* tag_map() const      { return _tag_map; }

  // invoked post-callback to tag, untag, or update the tag of an object
  void inline post_callback_tag_update(oop o, JvmtiTagMapTable* hashmap,
                                       JvmtiTagMapEntry* entry, jlong obj_tag);
 public:
  // Pre-callback work: captures the object's size, its current tag, and its
  // class's tag. Requires the VM thread or the tag map lock because the
  // hashmap is read without further synchronization.
  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
           "MT unsafe or must be VM thread");

    // object to tag
    _o = o;

    // object size
    _obj_size = (jlong)_o->size() * wordSize;

    // record the context
    _tag_map = tag_map;
    _hashmap = tag_map->hashmap();
    _entry = _hashmap->find(_o);

    // get object tag
    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();

    // get the class and the class's tag value
    assert(vmClasses::Class_klass()->is_mirror_instance_klass(), "Is not?");

    // class tags are stored against the class's java mirror
    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
  }

  // Post-callback work: writes back any tag change the agent made through
  // obj_tag_p() (tag, untag, or update).
  ~CallbackWrapper() {
    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
  }

  inline jlong* obj_tag_p()                     { return &_obj_tag; }
  inline jlong obj_size() const                 { return _obj_size; }
  inline jlong obj_tag() const                  { return _obj_tag; }
  inline jlong klass_tag() const                { return _klass_tag; }
};
 252 
 253 
 254 
 255 // callback post-callback to tag, untag, or update the tag of an object
 256 void inline CallbackWrapper::post_callback_tag_update(oop o,
 257                                                       JvmtiTagMapTable* hashmap,
 258                                                       JvmtiTagMapEntry* entry,
 259                                                       jlong obj_tag) {
 260   if (entry == NULL) {
 261     if (obj_tag != 0) {
 262       // callback has tagged the object
 263       assert(Thread::current()->is_VM_thread(), "must be VMThread");
 264       hashmap->add(o, obj_tag);
 265     }
 266   } else {
 267     // object was previously tagged - the callback may have untagged
 268     // the object or changed the tag value
 269     if (obj_tag == 0) {
 270       hashmap->remove(o);
 271     } else {
 272       if (obj_tag != entry->tag()) {
 273          entry->set_tag(obj_tag);
 274       }
 275     }
 276   }
 277 }
 278 
 279 // An extended CallbackWrapper used when reporting an object reference
 280 // to the agent.
 281 //
 282 // {
 283 //   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
 284 //
 285 //   (*callback)(wrapper.klass_tag(),
 286 //               wrapper.obj_size(),
 287 //               wrapper.obj_tag_p()
 288 //               wrapper.referrer_tag_p(), ...)
 289 //
 290 // } // wrapper goes out of scope here which results in the destructor
 291 //      checking to see if the referrer object has been tagged, untagged,
 292 //      or the tag value has changed.
 293 //
class TwoOopCallbackWrapper : public CallbackWrapper {
 private:
  bool _is_reference_to_self;           // true when referrer == o
  JvmtiTagMapTable* _referrer_hashmap;  // unset for a self reference (see dtor guard)
  JvmtiTagMapEntry* _referrer_entry;    // unset for a self reference
  oop _referrer;                        // unset for a self reference
  jlong _referrer_obj_tag;              // unset for a self reference
  jlong _referrer_klass_tag;
  jlong* _referrer_tag_p;               // where the agent reads/writes the referrer tag

  bool is_reference_to_self() const             { return _is_reference_to_self; }

 public:
  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
    CallbackWrapper(tag_map, o)
  {
    // self reference needs to be handled in a special way
    _is_reference_to_self = (referrer == o);

    if (_is_reference_to_self) {
      // share the base wrapper's state so only one post-callback update runs
      _referrer_klass_tag = klass_tag();
      _referrer_tag_p = obj_tag_p();
    } else {
      _referrer = referrer;
      // record the context
      _referrer_hashmap = tag_map->hashmap();
      _referrer_entry = _referrer_hashmap->find(_referrer);

      // get object tag
      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
      _referrer_tag_p = &_referrer_obj_tag;

      // get referrer class tag.
      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
    }
  }

  // For a self reference the base class destructor performs the single
  // (shared) tag update; otherwise write back the referrer's tag here.
  ~TwoOopCallbackWrapper() {
    if (!is_reference_to_self()){
      post_callback_tag_update(_referrer,
                               _referrer_hashmap,
                               _referrer_entry,
                               _referrer_obj_tag);
    }
  }

  // address of referrer tag
  // (for a self reference this will return the same thing as obj_tag_p())
  inline jlong* referrer_tag_p()        { return _referrer_tag_p; }

  // referrer's class tag
  inline jlong referrer_klass_tag()     { return _referrer_klass_tag; }
};
 347 
 348 // tag an object
 349 //
 350 // This function is performance critical. If many threads attempt to tag objects
 351 // around the same time then it's possible that the Mutex associated with the
 352 // tag map will be a hot lock.
 353 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
 354   MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
 355 
 356   // SetTag should not post events because the JavaThread has to
 357   // transition to native for the callback and this cannot stop for
 358   // safepoints with the hashmap lock held.
 359   check_hashmap(/*post_events*/ false);
 360 
 361   // resolve the object
 362   oop o = JNIHandles::resolve_non_null(object);
 363 
 364   // see if the object is already tagged
 365   JvmtiTagMapTable* hashmap = _hashmap;
 366   JvmtiTagMapEntry* entry = hashmap->find(o);
 367 
 368   // if the object is not already tagged then we tag it
 369   if (entry == NULL) {
 370     if (tag != 0) {
 371       hashmap->add(o, tag);
 372     } else {
 373       // no-op
 374     }
 375   } else {
 376     // if the object is already tagged then we either update
 377     // the tag (if a new tag value has been provided)
 378     // or remove the object if the new tag value is 0.
 379     if (tag == 0) {
 380       hashmap->remove(o);
 381     } else {
 382       entry->set_tag(tag);
 383     }
 384   }
 385 }
 386 
 387 // get the tag for an object
jlong JvmtiTagMap::get_tag(jobject object) {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);

  // GetTag should not post events because the JavaThread has to
  // transition to native for the callback and this cannot stop for
  // safepoints with the hashmap lock held.
  check_hashmap(/*post_events*/ false);

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  // 0 means the object is untagged
  return tag_for(this, o);
}
 401 
 402 
 403 // Helper class used to describe the static or instance fields of a class.
 404 // For each field it holds the field index (as defined by the JVMTI specification),
 405 // the field type, and the offset.
 406 
class ClassFieldDescriptor: public CHeapObj<mtInternal> {
 private:
  int _field_index;   // field index as defined by the JVMTI specification
  int _field_offset;  // byte offset of the field within the holder
  char _field_type;   // first character of the field signature
 public:
  ClassFieldDescriptor(int index, char type, int offset) :
    _field_index(index), _field_offset(offset), _field_type(type) {
  }
  int field_index()  const  { return _field_index; }
  char field_type()  const  { return _field_type; }
  int field_offset() const  { return _field_offset; }
};
 420 
// Holds the set of static or instance fields of a class as a list of
// ClassFieldDescriptors. Instances are created only through the two
// static factory functions below.
class ClassFieldMap: public CHeapObj<mtInternal> {
 private:
  enum {
    initial_field_count = 5   // initial capacity of the descriptor list
  };

  // list of field descriptors
  GrowableArray<ClassFieldDescriptor*>* _fields;

  // constructor
  ClassFieldMap();

  // add a field
  void add(int index, char type, int offset);

 public:
  ~ClassFieldMap();

  // access
  int field_count()                     { return _fields->length(); }
  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }

  // functions to create maps of static or instance fields
  static ClassFieldMap* create_map_of_static_fields(Klass* k);
  static ClassFieldMap* create_map_of_instance_fields(oop obj);
};
 447 
ClassFieldMap::ClassFieldMap() {
  // C-heap backed array so the map can outlive any ResourceMark in scope
  _fields = new (ResourceObj::C_HEAP, mtServiceability)
    GrowableArray<ClassFieldDescriptor*>(initial_field_count, mtServiceability);
}
 452 
 453 ClassFieldMap::~ClassFieldMap() {
 454   for (int i=0; i<_fields->length(); i++) {
 455     delete _fields->at(i);
 456   }
 457   delete _fields;
 458 }
 459 
 460 void ClassFieldMap::add(int index, char type, int offset) {
 461   ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
 462   _fields->append(field);
 463 }
 464 
 465 // Returns a heap allocated ClassFieldMap to describe the static fields
 466 // of the given class.
 467 //
ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
  InstanceKlass* ik = InstanceKlass::cast(k);

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // Count fields to derive the maximum JVMTI field index; indices are then
  // assigned in reverse iteration order as (max_field_index - index).
  // NOTE(review): the counting stream is built with (false, false) while the
  // iterating stream uses (true, true) -- presumably so the index space
  // matches the JVMTI definition; confirm against FilteredFieldStream.
  FilteredFieldStream f(ik, false, false);
  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, true, true); !fld.eos(); fld.next(), index++) {
    // ignore instance fields
    if (!fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }
  return field_map;
}
 487 
 488 // Returns a heap allocated ClassFieldMap to describe the instance fields
 489 // of the given class. All instance fields are included (this means public
 490 // and private fields declared in superclasses and superinterfaces too).
 491 //
ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
  InstanceKlass* ik = InstanceKlass::cast(obj->klass());

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // Count fields to derive the maximum JVMTI field index; indices are then
  // assigned in reverse iteration order as (max_field_index - index).
  FilteredFieldStream f(ik, false, false);

  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, false, false); !fld.eos(); fld.next(), index++) {
    // ignore static fields
    if (fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }

  return field_map;
}
 513 
// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
// heap iteration and avoid creating a field map for each object in the heap
// (only need to create the map when the first instance of a class is encountered).
//
class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
 private:
   enum {
     initial_class_count = 200   // initial capacity of _class_list
   };
  ClassFieldMap* _field_map;     // owned; deleted by the destructor

  ClassFieldMap* field_map() const          { return _field_map; }

  // construction/destruction is private: instances are managed through
  // get_map_of_instance_fields() and clear_cache()
  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
  ~JvmtiCachedClassFieldMap();

  // klasses currently caching a field map; walked by clear_cache()
  static GrowableArray<InstanceKlass*>* _class_list;
  static void add_to_class_list(InstanceKlass* ik);

 public:
  // returns the field map for a given object (returning map cached
  // by InstanceKlass if possible)
  static ClassFieldMap* get_map_of_instance_fields(oop obj);

  // removes the field map from all instanceKlasses - should be
  // called before VM operation completes
  static void clear_cache();

  // returns the number of ClassFieldMap cached by instanceKlasses
  static int cached_field_map_count();
};
 546 
// Lazily created in add_to_class_list(); NULL until the first klass is added.
GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
 548 
 549 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
 550   _field_map = field_map;
 551 }
 552 
 553 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
 554   if (_field_map != NULL) {
 555     delete _field_map;
 556   }
 557 }
 558 
 559 // Marker class to ensure that the class file map cache is only used in a defined
 560 // scope.
class ClassFieldMapCacheMark : public StackObj {
 private:
   static bool _is_active;   // true while a mark is in scope (no nesting)
 public:
   // Opens the caching scope; only valid on the VM thread with an empty cache.
   ClassFieldMapCacheMark() {
     assert(Thread::current()->is_VM_thread(), "must be VMThread");
     assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
     assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
     _is_active = true;
   }
   // Closes the scope and drops every cached field map.
   ~ClassFieldMapCacheMark() {
     JvmtiCachedClassFieldMap::clear_cache();
     _is_active = false;
   }
   static bool is_active() { return _is_active; }
};
 577 
// Zero-initialized static: no mark is active at VM start.
bool ClassFieldMapCacheMark::_is_active;
 579 
 580 
 581 // record that the given InstanceKlass is caching a field map
void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
  // lazily create the list on first use; C-heap so it outlives resource marks
  if (_class_list == NULL) {
    _class_list = new (ResourceObj::C_HEAP, mtServiceability)
      GrowableArray<InstanceKlass*>(initial_class_count, mtServiceability);
  }
  _class_list->push(ik);
}
 589 
 590 // returns the instance field map for the given object
 591 // (returns field map cached by the InstanceKlass if possible)
ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
  // VM-thread-only and scoped by ClassFieldMapCacheMark, so the cache
  // needs no synchronization.
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");

  Klass* k = obj->klass();
  InstanceKlass* ik = InstanceKlass::cast(k);

  // return cached map if possible
  JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
  if (cached_map != NULL) {
    assert(cached_map->field_map() != NULL, "missing field list");
    return cached_map->field_map();
  } else {
    // first instance of this class seen: build the map, cache it on the
    // klass, and remember the klass so clear_cache() can find it later
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
    cached_map = new JvmtiCachedClassFieldMap(field_map);
    ik->set_jvmti_cached_class_field_map(cached_map);
    add_to_class_list(ik);
    return field_map;
  }
}
 612 
 613 // remove the fields maps cached from all instanceKlasses
 614 void JvmtiCachedClassFieldMap::clear_cache() {
 615   assert(Thread::current()->is_VM_thread(), "must be VMThread");
 616   if (_class_list != NULL) {
 617     for (int i = 0; i < _class_list->length(); i++) {
 618       InstanceKlass* ik = _class_list->at(i);
 619       JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
 620       assert(cached_map != NULL, "should not be NULL");
 621       ik->set_jvmti_cached_class_field_map(NULL);
 622       delete cached_map;  // deletes the encapsulated field map
 623     }
 624     delete _class_list;
 625     _class_list = NULL;
 626   }
 627 }
 628 
 629 // returns the number of ClassFieldMap cached by instanceKlasses
 630 int JvmtiCachedClassFieldMap::cached_field_map_count() {
 631   return (_class_list == NULL) ? 0 : _class_list->length();
 632 }
 633 
 634 // helper function to indicate if an object is filtered by its tag or class tag
 635 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
 636                                               jlong klass_tag,
 637                                               int heap_filter) {
 638   // apply the heap filter
 639   if (obj_tag != 0) {
 640     // filter out tagged objects
 641     if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
 642   } else {
 643     // filter out untagged objects
 644     if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
 645   }
 646   if (klass_tag != 0) {
 647     // filter out objects with tagged classes
 648     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
 649   } else {
 650     // filter out objects with untagged classes.
 651     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
 652   }
 653   return false;
 654 }
 655 
 656 // helper function to indicate if an object is filtered by a klass filter
 657 static inline bool is_filtered_by_klass_filter(oop obj, Klass* klass_filter) {
 658   if (klass_filter != NULL) {
 659     if (obj->klass() != klass_filter) {
 660       return true;
 661     }
 662   }
 663   return false;
 664 }
 665 
 666 // helper function to tell if a field is a primitive field or not
 667 static inline bool is_primitive_field_type(char type) {
 668   return (type != JVM_SIGNATURE_CLASS && type != JVM_SIGNATURE_ARRAY);
 669 }
 670 
 671 // helper function to copy the value from location addr to jvalue.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  // Reads the raw field bytes at 'addr' into the matching union member;
  // the caller guarantees addr points at a field of 'value_type'.
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
    default: ShouldNotReachHere();
  }
}
 685 
 686 // helper function to invoke string primitive value callback
 687 // returns visit control flags
static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
                                         CallbackWrapper* wrapper,
                                         oop str,
                                         void* user_data)
{
  assert(str->klass() == vmClasses::String_klass(), "not a string");

  typeArrayOop s_value = java_lang_String::value(str);

  // JDK-6584008: the value field may be null if a String instance is
  // partially constructed.
  if (s_value == NULL) {
    return 0;
  }
  // get the string value and length
  // (string value may be offset from the base)
  int s_len = java_lang_String::length(str);
  bool is_latin1 = java_lang_String::is_latin1(str);
  jchar* value;
  if (s_len > 0) {
    if (!is_latin1) {
      // UTF16 already: point directly into the value array
      value = s_value->char_at_addr(0);
    } else {
      // Inflate latin1 encoded string to UTF16
      jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
      for (int i = 0; i < s_len; i++) {
        // mask with 0xff so bytes >= 0x80 are not sign-extended
        buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
      }
      value = &buf[0];
    }
  } else {
    // Don't use char_at_addr(0) if length is 0
    value = (jchar*) s_value->base(T_CHAR);
  }

  // invoke the callback
  jint res = (*cb)(wrapper->klass_tag(),
                   wrapper->obj_size(),
                   wrapper->obj_tag_p(),
                   value,
                   (jint)s_len,
                   user_data);

  // the inflation buffer is only allocated for non-empty latin1 strings
  if (is_latin1 && s_len > 0) {
    FREE_C_HEAP_ARRAY(jchar, value);
  }
  return res;
}
 736 
// helper function to invoke array primitive value callback
// returns visit control flags
static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
                                                  CallbackWrapper* wrapper,
                                                  oop obj,
                                                  void* user_data)
{
  assert(obj->is_typeArray(), "not a primitive array");

  // get base address of first element
  typeArrayOop array = typeArrayOop(obj);
  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
  void* elements = array->base(type);

  // jvmtiPrimitiveType is defined so this mapping is always correct
  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);

  // the agent reads the elements in place; no copy is made
  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               (jint)array->length(),
               elem_type,
               elements,
               user_data);
}
 762 
 763 // helper function to invoke the primitive field callback for all static fields
 764 // of a given class
static jint invoke_primitive_field_callback_for_static_fields
  (CallbackWrapper* wrapper,
   oop obj,
   jvmtiPrimitiveFieldCallback cb,
   void* user_data)
{
  // for static fields only the index will be set
  // (function-static and mutated below -- relies on single-threaded use;
  // the heap-walk callers run on the VM thread)
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(obj->klass() == vmClasses::Class_klass(), "not a class");
  if (java_lang_Class::is_primitive(obj)) {
    // primitive pseudo-classes (int.class etc.) have no fields
    return 0;
  }
  Klass* klass = java_lang_Class::as_Klass(obj);

  // ignore classes for object and type arrays
  if (!klass->is_instance_klass()) {
    return 0;
  }

  // ignore classes which aren't linked yet
  InstanceKlass* ik = InstanceKlass::cast(klass);
  if (!ik->is_linked()) {
    return 0;
  }

  // get the field map
  ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);

  // invoke the callback for each static primitive field
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value; static fields live in the java mirror
    int offset = field->field_offset();
    address addr = cast_from_oop<address>(klass->java_mirror()) + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      // free the map before propagating the abort
      delete field_map;
      return res;
    }
  }

  delete field_map;
  return 0;
}
 832 
 833 // helper function to invoke the primitive field callback for all instance fields
 834 // of a given object
static jint invoke_primitive_field_callback_for_instance_fields(
  CallbackWrapper* wrapper,
  oop obj,
  jvmtiPrimitiveFieldCallback cb,
  void* user_data)
{
  // for instance fields only the index will be set
  // (function-static and mutated below -- relies on single-threaded use;
  // the heap-walk callers run on the VM thread)
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  // get the map of the instance fields; cached per klass, not owned here
  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);

  // invoke the callback for each instance primitive field
  for (int i=0; i<fields->field_count(); i++) {
    ClassFieldDescriptor* field = fields->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value; instance fields live in the object itself
    int offset = field->field_offset();
    address addr = cast_from_oop<address>(obj) + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      return res;
    }
  }
  return 0;
}
 882 
 883 
 884 // VM operation to iterate over all objects in the heap (both reachable
 885 // and unreachable)
 886 class VM_HeapIterateOperation: public VM_Operation {
 887  private:
 888   ObjectClosure* _blk;
 889  public:
 890   VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }
 891 
 892   VMOp_Type type() const { return VMOp_HeapIterateOperation; }
 893   void doit() {
 894     // allows class files maps to be cached during iteration
 895     ClassFieldMapCacheMark cm;
 896 
 897     JvmtiTagMap::check_hashmaps_for_heapwalk();
 898 
 899     // make sure that heap is parsable (fills TLABs with filler objects)
 900     Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
 901 
 902     // Verify heap before iteration - if the heap gets corrupted then
 903     // JVMTI's IterateOverHeap will crash.
 904     if (VerifyBeforeIteration) {
 905       Universe::verify();
 906     }
 907 
 908     // do the iteration
 909     Universe::heap()->object_iterate(_blk);
 910   }
 911 
 912 };
 913 
 914 
 915 // An ObjectClosure used to support the deprecated IterateOverHeap and
 916 // IterateOverInstancesOfClass functions
class IterateOverHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;                          // tag map of the requesting JVMTI environment
  Klass* _klass;                                  // optional klass filter; NULL means no filtering
  jvmtiHeapObjectFilter _object_filter;           // tagged / untagged / either
  jvmtiHeapObjectCallback _heap_object_callback;  // agent callback invoked per object
  const void* _user_data;                         // opaque agent data passed through to the callback

  // accessors
  JvmtiTagMap* tag_map() const                    { return _tag_map; }
  jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  Klass* klass() const                            { return _klass; }
  const void* user_data() const                   { return _user_data; }

  // indicates if iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const               { return _iteration_aborted; }
  void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }

 public:
  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
                               Klass* klass,
                               jvmtiHeapObjectFilter object_filter,
                               jvmtiHeapObjectCallback heap_object_callback,
                               const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _object_filter(object_filter),
    _heap_object_callback(heap_object_callback),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  // invoked for each object in the heap (defined out-of-line below)
  void do_object(oop o);
};
 954 
 955 // invoked for each object in the heap
 956 void IterateOverHeapObjectClosure::do_object(oop o) {
 957   // check if iteration has been halted
 958   if (is_iteration_aborted()) return;
 959 
 960   // instanceof check when filtering by klass
 961   if (klass() != NULL && !o->is_a(klass())) {
 962     return;
 963   }
 964 
 965   // skip if object is a dormant shared object whose mirror hasn't been loaded
 966   if (o != NULL && o->klass()->java_mirror() == NULL) {
 967     log_debug(cds, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(o),
 968                          o->klass()->external_name());
 969     return;
 970   }
 971 
 972   // prepare for the calllback
 973   CallbackWrapper wrapper(tag_map(), o);
 974 
 975   // if the object is tagged and we're only interested in untagged objects
 976   // then don't invoke the callback. Similiarly, if the object is untagged
 977   // and we're only interested in tagged objects we skip the callback.
 978   if (wrapper.obj_tag() != 0) {
 979     if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
 980   } else {
 981     if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
 982   }
 983 
 984   // invoke the agent's callback
 985   jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
 986                                                        wrapper.obj_size(),
 987                                                        wrapper.obj_tag_p(),
 988                                                        (void*)user_data());
 989   if (control == JVMTI_ITERATION_ABORT) {
 990     set_iteration_aborted(true);
 991   }
 992 }
 993 
 994 // An ObjectClosure used to support the IterateThroughHeap function
class IterateThroughHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;                  // tag map of the requesting JVMTI environment
  Klass* _klass;                          // optional klass filter; NULL means no filtering
  int _heap_filter;                       // JVMTI_HEAP_FILTER_* bits
  const jvmtiHeapCallbacks* _callbacks;   // agent-supplied callback table
  const void* _user_data;                 // opaque agent data passed through to callbacks

  // accessor functions
  JvmtiTagMap* tag_map() const                     { return _tag_map; }
  int heap_filter() const                          { return _heap_filter; }
  const jvmtiHeapCallbacks* callbacks() const      { return _callbacks; }
  Klass* klass() const                             { return _klass; }
  const void* user_data() const                    { return _user_data; }

  // indicates if the iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const                { return _iteration_aborted; }

  // used to check the visit control flags. If the abort flag is set
  // then we set the iteration aborted flag so that the iteration completes
  // without processing any further objects
  bool check_flags_for_abort(jint flags) {
    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
    if (is_abort) {
      _iteration_aborted = true;
    }
    return is_abort;
  }

 public:
  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
                                  Klass* klass,
                                  int heap_filter,
                                  const jvmtiHeapCallbacks* heap_callbacks,
                                  const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _heap_filter(heap_filter),
    _callbacks(heap_callbacks),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  // invoked for each object in the heap (defined out-of-line below)
  void do_object(oop o);
};
1042 
1043 // invoked for each object in the heap
1044 void IterateThroughHeapObjectClosure::do_object(oop obj) {
1045   // check if iteration has been halted
1046   if (is_iteration_aborted()) return;
1047 
1048   // apply class filter
1049   if (is_filtered_by_klass_filter(obj, klass())) return;
1050 
1051   // skip if object is a dormant shared object whose mirror hasn't been loaded
1052   if (obj != NULL &&   obj->klass()->java_mirror() == NULL) {
1053     log_debug(cds, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(obj),
1054                          obj->klass()->external_name());
1055     return;
1056   }
1057 
1058   // prepare for callback
1059   CallbackWrapper wrapper(tag_map(), obj);
1060 
1061   // check if filtered by the heap filter
1062   if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
1063     return;
1064   }
1065 
1066   // for arrays we need the length, otherwise -1
1067   bool is_array = obj->is_array();
1068   int len = is_array ? arrayOop(obj)->length() : -1;
1069 
1070   // invoke the object callback (if callback is provided)
1071   if (callbacks()->heap_iteration_callback != NULL) {
1072     jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
1073     jint res = (*cb)(wrapper.klass_tag(),
1074                      wrapper.obj_size(),
1075                      wrapper.obj_tag_p(),
1076                      (jint)len,
1077                      (void*)user_data());
1078     if (check_flags_for_abort(res)) return;
1079   }
1080 
1081   // for objects and classes we report primitive fields if callback provided
1082   if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
1083     jint res;
1084     jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
1085     if (obj->klass() == vmClasses::Class_klass()) {
1086       res = invoke_primitive_field_callback_for_static_fields(&wrapper,
1087                                                                     obj,
1088                                                                     cb,
1089                                                                     (void*)user_data());
1090     } else {
1091       res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
1092                                                                       obj,
1093                                                                       cb,
1094                                                                       (void*)user_data());
1095     }
1096     if (check_flags_for_abort(res)) return;
1097   }
1098 
1099   // string callback
1100   if (!is_array &&
1101       callbacks()->string_primitive_value_callback != NULL &&
1102       obj->klass() == vmClasses::String_klass()) {
1103     jint res = invoke_string_value_callback(
1104                 callbacks()->string_primitive_value_callback,
1105                 &wrapper,
1106                 obj,
1107                 (void*)user_data() );
1108     if (check_flags_for_abort(res)) return;
1109   }
1110 
1111   // array callback
1112   if (is_array &&
1113       callbacks()->array_primitive_value_callback != NULL &&
1114       obj->is_typeArray()) {
1115     jint res = invoke_array_primitive_value_callback(
1116                callbacks()->array_primitive_value_callback,
1117                &wrapper,
1118                obj,
1119                (void*)user_data() );
1120     if (check_flags_for_abort(res)) return;
1121   }
1122 };
1123 
1124 
1125 // Deprecated function to iterate over all objects in the heap
1126 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
1127                                     Klass* klass,
1128                                     jvmtiHeapObjectCallback heap_object_callback,
1129                                     const void* user_data)
1130 {
1131   // EA based optimizations on tagged objects are already reverted.
1132   EscapeBarrier eb(object_filter == JVMTI_HEAP_OBJECT_UNTAGGED ||
1133                    object_filter == JVMTI_HEAP_OBJECT_EITHER,
1134                    JavaThread::current());
1135   eb.deoptimize_objects_all_threads();
1136   MutexLocker ml(Heap_lock);
1137   IterateOverHeapObjectClosure blk(this,
1138                                    klass,
1139                                    object_filter,
1140                                    heap_object_callback,
1141                                    user_data);
1142   VM_HeapIterateOperation op(&blk);
1143   VMThread::execute(&op);
1144 }
1145 
1146 
1147 // Iterates over all objects in the heap
1148 void JvmtiTagMap::iterate_through_heap(jint heap_filter,
1149                                        Klass* klass,
1150                                        const jvmtiHeapCallbacks* callbacks,
1151                                        const void* user_data)
1152 {
1153   // EA based optimizations on tagged objects are already reverted.
1154   EscapeBarrier eb(!(heap_filter & JVMTI_HEAP_FILTER_UNTAGGED), JavaThread::current());
1155   eb.deoptimize_objects_all_threads();
1156   MutexLocker ml(Heap_lock);
1157   IterateThroughHeapObjectClosure blk(this,
1158                                       klass,
1159                                       heap_filter,
1160                                       callbacks,
1161                                       user_data);
1162   VM_HeapIterateOperation op(&blk);
1163   VMThread::execute(&op);
1164 }
1165 
1166 void JvmtiTagMap::remove_dead_entries_locked(bool post_object_free) {
1167   assert(is_locked(), "precondition");
1168   if (_needs_cleaning) {
1169     // Recheck whether to post object free events under the lock.
1170     post_object_free = post_object_free && env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
1171     log_info(jvmti, table)("TagMap table needs cleaning%s",
1172                            (post_object_free ? " and posting" : ""));
1173     hashmap()->remove_dead_entries(env(), post_object_free);
1174     _needs_cleaning = false;
1175   }
1176 }
1177 
// Acquires the tag map lock and delegates to the locked variant.
void JvmtiTagMap::remove_dead_entries(bool post_object_free) {
  MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
  remove_dead_entries_locked(post_object_free);
}
1182 
class VM_JvmtiPostObjectFree: public VM_Operation {
  JvmtiTagMap* _tag_map;  // tag map whose dead entries are removed/posted
 public:
  VM_JvmtiPostObjectFree(JvmtiTagMap* tag_map) : _tag_map(tag_map) {}
  VMOp_Type type() const { return VMOp_Cleanup; }
  // Runs on the VM thread: removes dead entries and posts ObjectFree events.
  void doit() {
    _tag_map->remove_dead_entries(true /* post_object_free */);
  }

  // Doesn't need a safepoint, just the VM thread
  virtual bool evaluate_at_safepoint() const { return false; }
};
1195 
1196 // PostObjectFree can't be called by JavaThread, so call it from the VM thread.
void JvmtiTagMap::post_dead_objects_on_vm_thread() {
  // hand the cleanup (and ObjectFree event posting) off to the VM thread
  VM_JvmtiPostObjectFree op(this);
  VMThread::execute(&op);
}
1201 
void JvmtiTagMap::flush_object_free_events() {
  assert_not_at_safepoint();
  if (env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
    {
      MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
      // Nothing to clean (or map is empty): clear the flag under the lock
      // so later callers can take this fast path too.
      if (!_needs_cleaning || is_empty()) {
        _needs_cleaning = false;
        return;
      }
    } // Drop the lock so we can do the cleaning on the VM thread.
    // Needs both cleaning and event posting (up to some other thread
    // getting there first after we dropped the lock).
    post_dead_objects_on_vm_thread();
  } else {
    // events not requested: just drop the dead entries on this thread
    remove_dead_entries(false);
  }
}
1219 
1220 // support class for get_objects_with_tags
1221 
1222 class TagObjectCollector : public JvmtiTagMapEntryClosure {
1223  private:
1224   JvmtiEnv* _env;
1225   JavaThread* _thread;
1226   jlong* _tags;
1227   jint _tag_count;
1228   bool _some_dead_found;
1229 
1230   GrowableArray<jobject>* _object_results;  // collected objects (JNI weak refs)
1231   GrowableArray<uint64_t>* _tag_results;    // collected tags
1232 
1233  public:
1234   TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) :
1235     _env(env),
1236     _thread(JavaThread::current()),
1237     _tags((jlong*)tags),
1238     _tag_count(tag_count),
1239     _some_dead_found(false),
1240     _object_results(new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<jobject>(1, mtServiceability)),
1241     _tag_results(new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<uint64_t>(1, mtServiceability)) { }
1242 
1243   ~TagObjectCollector() {
1244     delete _object_results;
1245     delete _tag_results;
1246   }
1247 
1248   bool some_dead_found() const { return _some_dead_found; }
1249 
1250   // for each tagged object check if the tag value matches
1251   // - if it matches then we create a JNI local reference to the object
1252   // and record the reference and tag value.
1253   //
1254   void do_entry(JvmtiTagMapEntry* entry) {
1255     for (int i=0; i<_tag_count; i++) {
1256       if (_tags[i] == entry->tag()) {
1257         // The reference in this tag map could be the only (implicitly weak)
1258         // reference to that object. If we hand it out, we need to keep it live wrt
1259         // SATB marking similar to other j.l.ref.Reference referents. This is
1260         // achieved by using a phantom load in the object() accessor.
1261         oop o = entry->object();
1262         if (o == NULL) {
1263           _some_dead_found = true;
1264           // skip this whole entry
1265           return;
1266         }
1267         assert(o != NULL && Universe::heap()->is_in(o), "sanity check");
1268         jobject ref = JNIHandles::make_local(_thread, o);
1269         _object_results->append(ref);
1270         _tag_results->append((uint64_t)entry->tag());
1271       }
1272     }
1273   }
1274 
1275   // return the results from the collection
1276   //
1277   jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1278     jvmtiError error;
1279     int count = _object_results->length();
1280     assert(count >= 0, "sanity check");
1281 
1282     // if object_result_ptr is not NULL then allocate the result and copy
1283     // in the object references.
1284     if (object_result_ptr != NULL) {
1285       error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
1286       if (error != JVMTI_ERROR_NONE) {
1287         return error;
1288       }
1289       for (int i=0; i<count; i++) {
1290         (*object_result_ptr)[i] = _object_results->at(i);
1291       }
1292     }
1293 
1294     // if tag_result_ptr is not NULL then allocate the result and copy
1295     // in the tag values.
1296     if (tag_result_ptr != NULL) {
1297       error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
1298       if (error != JVMTI_ERROR_NONE) {
1299         if (object_result_ptr != NULL) {
1300           _env->Deallocate((unsigned char*)object_result_ptr);
1301         }
1302         return error;
1303       }
1304       for (int i=0; i<count; i++) {
1305         (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
1306       }
1307     }
1308 
1309     *count_ptr = count;
1310     return JVMTI_ERROR_NONE;
1311   }
1312 };
1313 
1314 // return the list of objects with the specified tags
// Collects all tagged objects whose tag matches one of 'tags' and returns
// JNI local refs plus tag values through the out parameters.
jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
  jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {

  TagObjectCollector collector(env(), tags, count);
  {
    // iterate over all tagged objects
    MutexLocker ml(lock(), Mutex::_no_safepoint_check_flag);
    // Can't post ObjectFree events here from a JavaThread, so this
    // will race with the gc_notification thread in the tiny
    // window where the object is not marked but hasn't been notified that
    // it is collected yet.
    entry_iterate(&collector);
  }
  // If dead entries were seen, post the pending ObjectFree events from the
  // VM thread before handing results back to the agent.
  if (collector.some_dead_found() && env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
    post_dead_objects_on_vm_thread();
  }
  return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
}
1333 
1334 
1335 // ObjectMarker is used to support the marking objects when walking the
1336 // heap.
1337 //
1338 // This implementation uses the existing mark bits in an object for
1339 // marking. Objects that are marked must later have their headers restored.
1340 // As most objects are unlocked and don't have their identity hash computed
1341 // we don't have to save their headers. Instead we save the headers that
1342 // are "interesting". Later when the headers are restored this implementation
1343 // restores all headers to their initial value and then restores the few
1344 // objects that had interesting headers.
1345 //
1346 // Future work: This implementation currently uses growable arrays to save
1347 // the oop and header of interesting objects. As an optimization we could
1348 // use the same technique as the GC and make use of the unused area
1349 // between top() and end().
1350 //
1351 
1352 // An ObjectClosure used to restore the mark bits of an object
1353 class RestoreMarksClosure : public ObjectClosure {
1354  public:
1355   void do_object(oop o) {
1356     if (o != NULL) {
1357       markWord mark = o->mark();
1358       if (mark.is_marked()) {
1359         o->init_mark();
1360       }
1361     }
1362   }
1363 };
1364 
1365 // ObjectMarker provides the mark and visited functions
class ObjectMarker : AllStatic {
 private:
  // saved headers of objects whose original mark word was "interesting"
  // (parallel stacks: _saved_oop_stack[i] owned _saved_mark_stack[i])
  static GrowableArray<oop>* _saved_oop_stack;
  static GrowableArray<markWord>* _saved_mark_stack;
  static bool _needs_reset;                  // do we need to reset mark bits?

 public:
  static void init();                       // initialize
  static void done();                       // clean-up
  
  static inline void mark(oop o);           // mark an object
  static inline bool visited(oop o);        // check if object has been visited

  static inline bool needs_reset()            { return _needs_reset; }
  static inline void set_needs_reset(bool v)  { _needs_reset = v; }
};
1383 
// ObjectMarker statics: the stacks are allocated lazily in init()
GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
GrowableArray<markWord>* ObjectMarker::_saved_mark_stack = NULL;
bool ObjectMarker::_needs_reset = true;  // need to reset mark bits by default
1387 
1388 // initialize ObjectMarker - prepares for object marking
1389 void ObjectMarker::init() {
1390   assert(Thread::current()->is_VM_thread(), "must be VMThread");
1391   assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
1392 
1393   // prepare heap for iteration
1394   Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
1395 
1396   // create stacks for interesting headers
1397   _saved_mark_stack = new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<markWord>(4000, mtServiceability);
1398   _saved_oop_stack = new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<oop>(4000, mtServiceability);
1399 }
1400 
1401 // Object marking is done so restore object headers
void ObjectMarker::done() {
  // iterate over all objects and restore the mark bits to
  // their initial value
  RestoreMarksClosure blk;
  if (needs_reset()) {
    Universe::heap()->object_iterate(&blk);
  } else {
    // We don't need to reset mark bits on this call, but reset the
    // flag to the default for the next call.
    set_needs_reset(true);
  }

  // now restore the interesting headers
  // (after the blanket reset above, so the saved values take effect)
  for (int i = 0; i < _saved_oop_stack->length(); i++) {
    oop o = _saved_oop_stack->at(i);
    markWord mark = _saved_mark_stack->at(i);
    o->set_mark(mark);
  }

  // free the stacks
  delete _saved_oop_stack;
  delete _saved_mark_stack;
}
1425 
1426 // mark an object
// mark an object (saving its header first if it must be preserved)
inline void ObjectMarker::mark(oop o) {
  assert(Universe::heap()->is_in(o), "sanity check");
  assert(!o->mark().is_marked(), "should only mark an object once");

  // object's mark word
  markWord mark = o->mark();

  // save the header before clobbering it if it carries information that
  // cannot be reconstructed (see ObjectMarker class comment)
  if (o->mark_must_be_preserved(mark)) {
    _saved_mark_stack->push(mark);
    _saved_oop_stack->push(o);
  }

  // mark the object
  o->set_mark(markWord::prototype().set_marked());
}
1442 
1443 // return true if object is marked
inline bool ObjectMarker::visited(oop o) {
  // an object counts as visited iff its header mark bit is currently set
  return o->mark().is_marked();
}
1447 
1448 // Stack allocated class to help ensure that ObjectMarker is used
1449 // correctly. Constructor initializes ObjectMarker, destructor calls
1450 // ObjectMarker's done() function to restore object headers.
class ObjectMarkerController : public StackObj {
 public:
  // prepare for marking (saves headers, makes heap parsable)
  ObjectMarkerController() {
    ObjectMarker::init();
  }
  // restore all object headers modified during the walk
  ~ObjectMarkerController() {
    ObjectMarker::done();
  }
};
1460 
1461 
1462 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
1463 // (not performance critical as only used for roots)
// Maps a (new-style) jvmtiHeapReferenceKind to the corresponding
// (old-style) jvmtiHeapRootKind; only root kinds are expected here.
static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
  switch (kind) {
    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
    case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
    case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
    case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
    case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
    default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
  }
}
1475 
1476 // Base class for all heap walk contexts. The base class maintains a flag
1477 // to indicate if the context is valid or not.
class HeapWalkContext {
 private:
  bool _valid;  // true while the context may be used
 public:
  // A context is constructed valid or invalid as requested and can
  // only transition to invalid afterwards.
  HeapWalkContext(bool valid) : _valid(valid) { }
  void invalidate()     { _valid = false; }
  bool is_valid() const { return _valid; }
};
1486 
1487 // A basic heap walk context for the deprecated heap walking functions.
1488 // The context for a basic heap walk are the callbacks and fields used by
1489 // the referrer caching scheme.
class BasicHeapWalkContext: public HeapWalkContext {
 private:
  jvmtiHeapRootCallback _heap_root_callback;        // called for each heap root
  jvmtiStackReferenceCallback _stack_ref_callback;  // called for stack references
  jvmtiObjectReferenceCallback _object_ref_callback;// called for object-to-object references

  // used for caching the tag of the most recent referrer
  oop _last_referrer;
  jlong _last_referrer_tag;

 public:
  // creates an invalid context
  BasicHeapWalkContext() : HeapWalkContext(false) { }

  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
                       jvmtiStackReferenceCallback stack_ref_callback,
                       jvmtiObjectReferenceCallback object_ref_callback) :
    HeapWalkContext(true),
    _heap_root_callback(heap_root_callback),
    _stack_ref_callback(stack_ref_callback),
    _object_ref_callback(object_ref_callback),
    _last_referrer(NULL),
    _last_referrer_tag(0) {
  }

  // accessors
  jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
  jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback;  }

  oop last_referrer() const               { return _last_referrer; }
  void set_last_referrer(oop referrer)    { _last_referrer = referrer; }
  jlong last_referrer_tag() const         { return _last_referrer_tag; }
  void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
};
1524 
1525 // The advanced heap walk context for the FollowReferences functions.
1526 // The context is the callbacks, and the fields used for filtering.
1527 class AdvancedHeapWalkContext: public HeapWalkContext {
1528  private:
1529   jint _heap_filter;
1530   Klass* _klass_filter;
1531   const jvmtiHeapCallbacks* _heap_callbacks;
1532 
1533  public:
1534   AdvancedHeapWalkContext() : HeapWalkContext(false) { }
1535 
1536   AdvancedHeapWalkContext(jint heap_filter,
1537                            Klass* klass_filter,
1538                            const jvmtiHeapCallbacks* heap_callbacks) :
1539     HeapWalkContext(true),
1540     _heap_filter(heap_filter),
1541     _klass_filter(klass_filter),
1542     _heap_callbacks(heap_callbacks) {
1543   }
1544 
1545   // accessors
1546   jint heap_filter() const         { return _heap_filter; }
1547   Klass* klass_filter() const      { return _klass_filter; }
1548 
1549   const jvmtiHeapReferenceCallback heap_reference_callback() const {
1550     return _heap_callbacks->heap_reference_callback;
1551   };
1552   const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
1553     return _heap_callbacks->primitive_field_callback;
1554   }
1555   const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
1556     return _heap_callbacks->array_primitive_value_callback;
1557   }
1558   const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
1559     return _heap_callbacks->string_primitive_value_callback;
1560   }
1561 };
1562 
1563 // The CallbackInvoker is a class with static functions that the heap walk can call
1564 // into to invoke callbacks. It works in one of two modes. The "basic" mode is
1565 // used for the deprecated IterateOverReachableObjects functions. The "advanced"
1566 // mode is for the newer FollowReferences function which supports a lot of
1567 // additional callbacks.
1568 class CallbackInvoker : AllStatic {
1569  private:
1570   // heap walk styles
1571   enum { basic, advanced };
1572   static int _heap_walk_type;
1573   static bool is_basic_heap_walk()           { return _heap_walk_type == basic; }
1574   static bool is_advanced_heap_walk()        { return _heap_walk_type == advanced; }
1575 
1576   // context for basic style heap walk
1577   static BasicHeapWalkContext _basic_context;
1578   static BasicHeapWalkContext* basic_context() {
1579     assert(_basic_context.is_valid(), "invalid");
1580     return &_basic_context;
1581   }
1582 
1583   // context for advanced style heap walk
1584   static AdvancedHeapWalkContext _advanced_context;
1585   static AdvancedHeapWalkContext* advanced_context() {
1586     assert(_advanced_context.is_valid(), "invalid");
1587     return &_advanced_context;
1588   }
1589 
1590   // context needed for all heap walks
1591   static JvmtiTagMap* _tag_map;
1592   static const void* _user_data;
1593   static GrowableArray<oop>* _visit_stack;
1594 
1595   // accessors
1596   static JvmtiTagMap* tag_map()                        { return _tag_map; }
1597   static const void* user_data()                       { return _user_data; }
1598   static GrowableArray<oop>* visit_stack()             { return _visit_stack; }
1599 
1600   // if the object hasn't been visited then push it onto the visit stack
1601   // so that it will be visited later
1602   static inline bool check_for_visit(oop obj) {
1603     if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
1604     return true;
1605   }
1606 
1607   // invoke basic style callbacks
1608   static inline bool invoke_basic_heap_root_callback
1609     (jvmtiHeapRootKind root_kind, oop obj);
1610   static inline bool invoke_basic_stack_ref_callback
1611     (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
1612      int slot, oop obj);
1613   static inline bool invoke_basic_object_reference_callback
1614     (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);
1615 
1616   // invoke advanced style callbacks
1617   static inline bool invoke_advanced_heap_root_callback
1618     (jvmtiHeapReferenceKind ref_kind, oop obj);
1619   static inline bool invoke_advanced_stack_ref_callback
1620     (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
1621      jmethodID method, jlocation bci, jint slot, oop obj);
1622   static inline bool invoke_advanced_object_reference_callback
1623     (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);
1624 
1625   // used to report the value of primitive fields
1626   static inline bool report_primitive_field
1627     (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);
1628 
1629  public:
1630   // initialize for basic mode
1631   static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1632                                              GrowableArray<oop>* visit_stack,
1633                                              const void* user_data,
1634                                              BasicHeapWalkContext context);
1635 
1636   // initialize for advanced mode
1637   static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1638                                                 GrowableArray<oop>* visit_stack,
1639                                                 const void* user_data,
1640                                                 AdvancedHeapWalkContext context);
1641 
1642    // functions to report roots
1643   static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
1644   static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
1645     jmethodID m, oop o);
1646   static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
1647     jmethodID method, jlocation bci, jint slot, oop o);
1648 
1649   // functions to report references
1650   static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
1651   static inline bool report_class_reference(oop referrer, oop referree);
1652   static inline bool report_class_loader_reference(oop referrer, oop referree);
1653   static inline bool report_signers_reference(oop referrer, oop referree);
1654   static inline bool report_protection_domain_reference(oop referrer, oop referree);
1655   static inline bool report_superclass_reference(oop referrer, oop referree);
1656   static inline bool report_interface_reference(oop referrer, oop referree);
1657   static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
1658   static inline bool report_field_reference(oop referrer, oop referree, jint slot);
1659   static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
1660   static inline bool report_primitive_array_values(oop array);
1661   static inline bool report_string_value(oop str);
1662   static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
1663   static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
1664 };
1665 
// statics
int CallbackInvoker::_heap_walk_type;                     // basic or advanced
BasicHeapWalkContext CallbackInvoker::_basic_context;     // valid only during a basic walk
AdvancedHeapWalkContext CallbackInvoker::_advanced_context; // valid only during an advanced walk
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;
1673 
1674 // initialize for basic heap walk (IterateOverReachableObjects et al)
1675 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1676                                                      GrowableArray<oop>* visit_stack,
1677                                                      const void* user_data,
1678                                                      BasicHeapWalkContext context) {
1679   _tag_map = tag_map;
1680   _visit_stack = visit_stack;
1681   _user_data = user_data;
1682   _basic_context = context;
1683   _advanced_context.invalidate();       // will trigger assertion if used
1684   _heap_walk_type = basic;
1685 }
1686 
1687 // initialize for advanced heap walk (FollowReferences)
1688 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1689                                                         GrowableArray<oop>* visit_stack,
1690                                                         const void* user_data,
1691                                                         AdvancedHeapWalkContext context) {
1692   _tag_map = tag_map;
1693   _visit_stack = visit_stack;
1694   _user_data = user_data;
1695   _advanced_context = context;
1696   _basic_context.invalidate();      // will trigger assertion if used
1697   _heap_walk_type = advanced;
1698 }
1699 
1700 
1701 // invoke basic style heap root callback
1702 inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
1703   // if we heap roots should be reported
1704   jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
1705   if (cb == NULL) {
1706     return check_for_visit(obj);
1707   }
1708 
1709   CallbackWrapper wrapper(tag_map(), obj);
1710   jvmtiIterationControl control = (*cb)(root_kind,
1711                                         wrapper.klass_tag(),
1712                                         wrapper.obj_size(),
1713                                         wrapper.obj_tag_p(),
1714                                         (void*)user_data());
1715   // push root to visit stack when following references
1716   if (control == JVMTI_ITERATION_CONTINUE &&
1717       basic_context()->object_ref_callback() != NULL) {
1718     visit_stack()->push(obj);
1719   }
1720   return control != JVMTI_ITERATION_ABORT;
1721 }
1722 
1723 // invoke basic style stack ref callback
1724 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
1725                                                              jlong thread_tag,
1726                                                              jint depth,
1727                                                              jmethodID method,
1728                                                              int slot,
1729                                                              oop obj) {
1730   // if we stack refs should be reported
1731   jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
1732   if (cb == NULL) {
1733     return check_for_visit(obj);
1734   }
1735 
1736   CallbackWrapper wrapper(tag_map(), obj);
1737   jvmtiIterationControl control = (*cb)(root_kind,
1738                                         wrapper.klass_tag(),
1739                                         wrapper.obj_size(),
1740                                         wrapper.obj_tag_p(),
1741                                         thread_tag,
1742                                         depth,
1743                                         method,
1744                                         slot,
1745                                         (void*)user_data());
1746   // push root to visit stack when following references
1747   if (control == JVMTI_ITERATION_CONTINUE &&
1748       basic_context()->object_ref_callback() != NULL) {
1749     visit_stack()->push(obj);
1750   }
1751   return control != JVMTI_ITERATION_ABORT;
1752 }
1753 
1754 // invoke basic style object reference callback
1755 inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
1756                                                                     oop referrer,
1757                                                                     oop referree,
1758                                                                     jint index) {
1759 
1760   BasicHeapWalkContext* context = basic_context();
1761 
1762   // callback requires the referrer's tag. If it's the same referrer
1763   // as the last call then we use the cached value.
1764   jlong referrer_tag;
1765   if (referrer == context->last_referrer()) {
1766     referrer_tag = context->last_referrer_tag();
1767   } else {
1768     referrer_tag = tag_for(tag_map(), referrer);
1769   }
1770 
1771   // do the callback
1772   CallbackWrapper wrapper(tag_map(), referree);
1773   jvmtiObjectReferenceCallback cb = context->object_ref_callback();
1774   jvmtiIterationControl control = (*cb)(ref_kind,
1775                                         wrapper.klass_tag(),
1776                                         wrapper.obj_size(),
1777                                         wrapper.obj_tag_p(),
1778                                         referrer_tag,
1779                                         index,
1780                                         (void*)user_data());
1781 
1782   // record referrer and referrer tag. For self-references record the
1783   // tag value from the callback as this might differ from referrer_tag.
1784   context->set_last_referrer(referrer);
1785   if (referrer == referree) {
1786     context->set_last_referrer_tag(*wrapper.obj_tag_p());
1787   } else {
1788     context->set_last_referrer_tag(referrer_tag);
1789   }
1790 
1791   if (control == JVMTI_ITERATION_CONTINUE) {
1792     return check_for_visit(referree);
1793   } else {
1794     return control != JVMTI_ITERATION_ABORT;
1795   }
1796 }
1797 
1798 // invoke advanced style heap root callback
1799 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
1800                                                                 oop obj) {
1801   AdvancedHeapWalkContext* context = advanced_context();
1802 
1803   // check that callback is provided
1804   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
1805   if (cb == NULL) {
1806     return check_for_visit(obj);
1807   }
1808 
1809   // apply class filter
1810   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1811     return check_for_visit(obj);
1812   }
1813 
1814   // setup the callback wrapper
1815   CallbackWrapper wrapper(tag_map(), obj);
1816 
1817   // apply tag filter
1818   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1819                                  wrapper.klass_tag(),
1820                                  context->heap_filter())) {
1821     return check_for_visit(obj);
1822   }
1823 
1824   // for arrays we need the length, otherwise -1
1825   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
1826 
1827   // invoke the callback
1828   jint res  = (*cb)(ref_kind,
1829                     NULL, // referrer info
1830                     wrapper.klass_tag(),
1831                     0,    // referrer_class_tag is 0 for heap root
1832                     wrapper.obj_size(),
1833                     wrapper.obj_tag_p(),
1834                     NULL, // referrer_tag_p
1835                     len,
1836                     (void*)user_data());
1837   if (res & JVMTI_VISIT_ABORT) {
1838     return false;// referrer class tag
1839   }
1840   if (res & JVMTI_VISIT_OBJECTS) {
1841     check_for_visit(obj);
1842   }
1843   return true;
1844 }
1845 
1846 // report a reference from a thread stack to an object
1847 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
1848                                                                 jlong thread_tag,
1849                                                                 jlong tid,
1850                                                                 int depth,
1851                                                                 jmethodID method,
1852                                                                 jlocation bci,
1853                                                                 jint slot,
1854                                                                 oop obj) {
1855   AdvancedHeapWalkContext* context = advanced_context();
1856 
1857   // check that callback is provider
1858   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
1859   if (cb == NULL) {
1860     return check_for_visit(obj);
1861   }
1862 
1863   // apply class filter
1864   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1865     return check_for_visit(obj);
1866   }
1867 
1868   // setup the callback wrapper
1869   CallbackWrapper wrapper(tag_map(), obj);
1870 
1871   // apply tag filter
1872   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1873                                  wrapper.klass_tag(),
1874                                  context->heap_filter())) {
1875     return check_for_visit(obj);
1876   }
1877 
1878   // setup the referrer info
1879   jvmtiHeapReferenceInfo reference_info;
1880   reference_info.stack_local.thread_tag = thread_tag;
1881   reference_info.stack_local.thread_id = tid;
1882   reference_info.stack_local.depth = depth;
1883   reference_info.stack_local.method = method;
1884   reference_info.stack_local.location = bci;
1885   reference_info.stack_local.slot = slot;
1886 
1887   // for arrays we need the length, otherwise -1
1888   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
1889 
1890   // call into the agent
1891   int res = (*cb)(ref_kind,
1892                   &reference_info,
1893                   wrapper.klass_tag(),
1894                   0,    // referrer_class_tag is 0 for heap root (stack)
1895                   wrapper.obj_size(),
1896                   wrapper.obj_tag_p(),
1897                   NULL, // referrer_tag is 0 for root
1898                   len,
1899                   (void*)user_data());
1900 
1901   if (res & JVMTI_VISIT_ABORT) {
1902     return false;
1903   }
1904   if (res & JVMTI_VISIT_OBJECTS) {
1905     check_for_visit(obj);
1906   }
1907   return true;
1908 }
1909 
// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
// only for ref_kinds defined by the JVM TI spec to carry reference_info.
// For every other kind, NULL is passed instead.
#define REF_INFO_MASK  ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
                      | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
                      | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
                      | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
                      | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
                      | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
1918 
1919 // invoke the object reference callback to report a reference
1920 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
1921                                                                        oop referrer,
1922                                                                        oop obj,
1923                                                                        jint index)
1924 {
1925   // field index is only valid field in reference_info
1926   static jvmtiHeapReferenceInfo reference_info = { 0 };
1927 
1928   AdvancedHeapWalkContext* context = advanced_context();
1929 
1930   // check that callback is provider
1931   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
1932   if (cb == NULL) {
1933     return check_for_visit(obj);
1934   }
1935 
1936   // apply class filter
1937   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
1938     return check_for_visit(obj);
1939   }
1940 
1941   // setup the callback wrapper
1942   TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
1943 
1944   // apply tag filter
1945   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
1946                                  wrapper.klass_tag(),
1947                                  context->heap_filter())) {
1948     return check_for_visit(obj);
1949   }
1950 
1951   // field index is only valid field in reference_info
1952   reference_info.field.index = index;
1953 
1954   // for arrays we need the length, otherwise -1
1955   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
1956 
1957   // invoke the callback
1958   int res = (*cb)(ref_kind,
1959                   (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
1960                   wrapper.klass_tag(),
1961                   wrapper.referrer_klass_tag(),
1962                   wrapper.obj_size(),
1963                   wrapper.obj_tag_p(),
1964                   wrapper.referrer_tag_p(),
1965                   len,
1966                   (void*)user_data());
1967 
1968   if (res & JVMTI_VISIT_ABORT) {
1969     return false;
1970   }
1971   if (res & JVMTI_VISIT_OBJECTS) {
1972     check_for_visit(obj);
1973   }
1974   return true;
1975 }
1976 
1977 // report a "simple root"
1978 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
1979   assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
1980          kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
1981 
1982   if (is_basic_heap_walk()) {
1983     // map to old style root kind
1984     jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
1985     return invoke_basic_heap_root_callback(root_kind, obj);
1986   } else {
1987     assert(is_advanced_heap_walk(), "wrong heap walk type");
1988     return invoke_advanced_heap_root_callback(kind, obj);
1989   }
1990 }
1991 
1992 
1993 // invoke the primitive array values
1994 inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
1995   assert(obj->is_typeArray(), "not a primitive array");
1996 
1997   AdvancedHeapWalkContext* context = advanced_context();
1998   assert(context->array_primitive_value_callback() != NULL, "no callback");
1999 
2000   // apply class filter
2001   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2002     return true;
2003   }
2004 
2005   CallbackWrapper wrapper(tag_map(), obj);
2006 
2007   // apply tag filter
2008   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2009                                  wrapper.klass_tag(),
2010                                  context->heap_filter())) {
2011     return true;
2012   }
2013 
2014   // invoke the callback
2015   int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
2016                                                   &wrapper,
2017                                                   obj,
2018                                                   (void*)user_data());
2019   return (!(res & JVMTI_VISIT_ABORT));
2020 }
2021 
2022 // invoke the string value callback
2023 inline bool CallbackInvoker::report_string_value(oop str) {
2024   assert(str->klass() == vmClasses::String_klass(), "not a string");
2025 
2026   AdvancedHeapWalkContext* context = advanced_context();
2027   assert(context->string_primitive_value_callback() != NULL, "no callback");
2028 
2029   // apply class filter
2030   if (is_filtered_by_klass_filter(str, context->klass_filter())) {
2031     return true;
2032   }
2033 
2034   CallbackWrapper wrapper(tag_map(), str);
2035 
2036   // apply tag filter
2037   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2038                                  wrapper.klass_tag(),
2039                                  context->heap_filter())) {
2040     return true;
2041   }
2042 
2043   // invoke the callback
2044   int res = invoke_string_value_callback(context->string_primitive_value_callback(),
2045                                          &wrapper,
2046                                          str,
2047                                          (void*)user_data());
2048   return (!(res & JVMTI_VISIT_ABORT));
2049 }
2050 
2051 // invoke the primitive field callback
2052 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
2053                                                     oop obj,
2054                                                     jint index,
2055                                                     address addr,
2056                                                     char type)
2057 {
2058   // for primitive fields only the index will be set
2059   static jvmtiHeapReferenceInfo reference_info = { 0 };
2060 
2061   AdvancedHeapWalkContext* context = advanced_context();
2062   assert(context->primitive_field_callback() != NULL, "no callback");
2063 
2064   // apply class filter
2065   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2066     return true;
2067   }
2068 
2069   CallbackWrapper wrapper(tag_map(), obj);
2070 
2071   // apply tag filter
2072   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2073                                  wrapper.klass_tag(),
2074                                  context->heap_filter())) {
2075     return true;
2076   }
2077 
2078   // the field index in the referrer
2079   reference_info.field.index = index;
2080 
2081   // map the type
2082   jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
2083 
2084   // setup the jvalue
2085   jvalue value;
2086   copy_to_jvalue(&value, addr, value_type);
2087 
2088   jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
2089   int res = (*cb)(ref_kind,
2090                   &reference_info,
2091                   wrapper.klass_tag(),
2092                   wrapper.obj_tag_p(),
2093                   value,
2094                   value_type,
2095                   (void*)user_data());
2096   return (!(res & JVMTI_VISIT_ABORT));
2097 }
2098 
2099 
2100 // instance field
2101 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
2102                                                              jint index,
2103                                                              address value,
2104                                                              char type) {
2105   return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2106                                 obj,
2107                                 index,
2108                                 value,
2109                                 type);
2110 }
2111 
2112 // static field
2113 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
2114                                                            jint index,
2115                                                            address value,
2116                                                            char type) {
2117   return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2118                                 obj,
2119                                 index,
2120                                 value,
2121                                 type);
2122 }
2123 
2124 // report a JNI local (root object) to the profiler
2125 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
2126   if (is_basic_heap_walk()) {
2127     return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
2128                                            thread_tag,
2129                                            depth,
2130                                            m,
2131                                            -1,
2132                                            obj);
2133   } else {
2134     return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
2135                                               thread_tag, tid,
2136                                               depth,
2137                                               m,
2138                                               (jlocation)-1,
2139                                               -1,
2140                                               obj);
2141   }
2142 }
2143 
2144 
2145 // report a local (stack reference, root object)
2146 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2147                                                    jlong tid,
2148                                                    jint depth,
2149                                                    jmethodID method,
2150                                                    jlocation bci,
2151                                                    jint slot,
2152                                                    oop obj) {
2153   if (is_basic_heap_walk()) {
2154     return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2155                                            thread_tag,
2156                                            depth,
2157                                            method,
2158                                            slot,
2159                                            obj);
2160   } else {
2161     return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2162                                               thread_tag,
2163                                               tid,
2164                                               depth,
2165                                               method,
2166                                               bci,
2167                                               slot,
2168                                               obj);
2169   }
2170 }
2171 
2172 // report an object referencing a class.
2173 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
2174   if (is_basic_heap_walk()) {
2175     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2176   } else {
2177     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2178   }
2179 }
2180 
2181 // report a class referencing its class loader.
2182 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
2183   if (is_basic_heap_walk()) {
2184     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2185   } else {
2186     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2187   }
2188 }
2189 
2190 // report a class referencing its signers.
2191 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2192   if (is_basic_heap_walk()) {
2193     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2194   } else {
2195     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2196   }
2197 }
2198 
2199 // report a class referencing its protection domain..
2200 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2201   if (is_basic_heap_walk()) {
2202     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2203   } else {
2204     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2205   }
2206 }
2207 
2208 // report a class referencing its superclass.
2209 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2210   if (is_basic_heap_walk()) {
2211     // Send this to be consistent with past implementation
2212     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2213   } else {
2214     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2215   }
2216 }
2217 
2218 // report a class referencing one of its interfaces.
2219 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2220   if (is_basic_heap_walk()) {
2221     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2222   } else {
2223     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2224   }
2225 }
2226 
2227 // report a class referencing one of its static fields.
2228 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2229   if (is_basic_heap_walk()) {
2230     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2231   } else {
2232     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2233   }
2234 }
2235 
2236 // report an array referencing an element object
2237 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2238   if (is_basic_heap_walk()) {
2239     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2240   } else {
2241     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2242   }
2243 }
2244 
2245 // report an object referencing an instance field object
2246 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2247   if (is_basic_heap_walk()) {
2248     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2249   } else {
2250     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2251   }
2252 }
2253 
2254 // report an array referencing an element object
2255 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2256   if (is_basic_heap_walk()) {
2257     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2258   } else {
2259     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2260   }
2261 }
2262 
2263 // A supporting closure used to process simple roots
2264 class SimpleRootsClosure : public OopClosure {
2265  private:
2266   jvmtiHeapReferenceKind _kind;
2267   bool _continue;
2268 
2269   jvmtiHeapReferenceKind root_kind()    { return _kind; }
2270 
2271  public:
2272   void set_kind(jvmtiHeapReferenceKind kind) {
2273     _kind = kind;
2274     _continue = true;
2275   }
2276 
2277   inline bool stopped() {
2278     return !_continue;
2279   }
2280 
2281   void do_oop(oop* obj_p) {
2282     // iteration has terminated
2283     if (stopped()) {
2284       return;
2285     }
2286 
2287     oop o = NativeAccess<AS_NO_KEEPALIVE>::oop_load(obj_p);
2288     // ignore null
2289     if (o == NULL) {
2290       return;
2291     }
2292 
2293     assert(Universe::heap()->is_in(o), "should be impossible");
2294 
2295     jvmtiHeapReferenceKind kind = root_kind();
2296 
2297     // invoke the callback
2298     _continue = CallbackInvoker::report_simple_root(kind, o);
2299 
2300   }
2301   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2302 };
2303 
2304 // A supporting closure used to process JNI locals
2305 class JNILocalRootsClosure : public OopClosure {
2306  private:
2307   jlong _thread_tag;
2308   jlong _tid;
2309   jint _depth;
2310   jmethodID _method;
2311   bool _continue;
2312  public:
2313   void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
2314     _thread_tag = thread_tag;
2315     _tid = tid;
2316     _depth = depth;
2317     _method = method;
2318     _continue = true;
2319   }
2320 
2321   inline bool stopped() {
2322     return !_continue;
2323   }
2324 
2325   void do_oop(oop* obj_p) {
2326     // iteration has terminated
2327     if (stopped()) {
2328       return;
2329     }
2330 
2331     oop o = *obj_p;
2332     // ignore null
2333     if (o == NULL) {
2334       return;
2335     }
2336 
2337     // invoke the callback
2338     _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
2339   }
2340   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2341 };
2342 
2343 
2344 // A VM operation to iterate over objects that are reachable from
2345 // a set of roots or an initial object.
2346 //
2347 // For VM_HeapWalkOperation the set of roots used is :-
2348 //
2349 // - All JNI global references
2350 // - All inflated monitors
2351 // - All classes loaded by the boot class loader (or all classes
2352 //     in the event that class unloading is disabled)
2353 // - All java threads
2354 // - For each java thread then all locals and JNI local references
2355 //      on the thread's execution stack
2356 // - All visible/explainable objects from Universes::oops_do
2357 //
class VM_HeapWalkOperation: public VM_Operation {
 private:
  enum {
    initial_visit_stack_size = 4000
  };

  bool _is_advanced_heap_walk;                      // indicates FollowReferences
  JvmtiTagMap* _tag_map;
  Handle _initial_object;                           // null => walk from the heap roots
  GrowableArray<oop>* _visit_stack;                 // the visit stack

  bool _following_object_refs;                      // are we following object references

  bool _reporting_primitive_fields;                 // optional reporting
  bool _reporting_primitive_array_values;
  bool _reporting_string_values;

  // C-heap allocated so the stack survives resource marks during the walk;
  // freed in the destructor
  GrowableArray<oop>* create_visit_stack() {
    return new (ResourceObj::C_HEAP, mtServiceability) GrowableArray<oop>(initial_visit_stack_size, mtServiceability);
  }

  // accessors
  bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
  JvmtiTagMap* tag_map() const                     { return _tag_map; }
  Handle initial_object() const                    { return _initial_object; }

  bool is_following_references() const             { return _following_object_refs; }

  bool is_reporting_primitive_fields()  const      { return _reporting_primitive_fields; }
  bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
  bool is_reporting_string_values() const          { return _reporting_string_values; }

  GrowableArray<oop>* visit_stack() const          { return _visit_stack; }

  // iterate over the various object types; each returns false when a
  // callback has requested that the iteration terminate
  inline bool iterate_over_array(oop o);
  inline bool iterate_over_type_array(oop o);
  inline bool iterate_over_class(oop o);
  inline bool iterate_over_object(oop o);

  // root collection
  inline bool collect_simple_roots();
  inline bool collect_stack_roots();
  inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);

  // visit an object
  inline bool visit(oop o);

 public:
  // basic heap walk (IterateOverReachableObjects and friends)
  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
                       Handle initial_object,
                       BasicHeapWalkContext callbacks,
                       const void* user_data);

  // advanced heap walk (FollowReferences)
  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
                       Handle initial_object,
                       AdvancedHeapWalkContext callbacks,
                       const void* user_data);

  ~VM_HeapWalkOperation();

  VMOp_Type type() const { return VMOp_HeapWalkOperation; }
  void doit();
};
2422 
2423 
2424 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2425                                            Handle initial_object,
2426                                            BasicHeapWalkContext callbacks,
2427                                            const void* user_data) {
2428   _is_advanced_heap_walk = false;
2429   _tag_map = tag_map;
2430   _initial_object = initial_object;
2431   _following_object_refs = (callbacks.object_ref_callback() != NULL);
2432   _reporting_primitive_fields = false;
2433   _reporting_primitive_array_values = false;
2434   _reporting_string_values = false;
2435   _visit_stack = create_visit_stack();
2436 
2437 
2438   CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2439 }
2440 
2441 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2442                                            Handle initial_object,
2443                                            AdvancedHeapWalkContext callbacks,
2444                                            const void* user_data) {
2445   _is_advanced_heap_walk = true;
2446   _tag_map = tag_map;
2447   _initial_object = initial_object;
2448   _following_object_refs = true;
2449   _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);;
2450   _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);;
2451   _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);;
2452   _visit_stack = create_visit_stack();
2453 
2454   CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2455 }
2456 
2457 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2458   if (_following_object_refs) {
2459     assert(_visit_stack != NULL, "checking");
2460     delete _visit_stack;
2461     _visit_stack = NULL;
2462   }
2463 }
2464 
2465 // an array references its class and has a reference to
2466 // each element in the array
2467 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
2468   objArrayOop array = objArrayOop(o);
2469 
2470   // array reference to its class
2471   oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
2472   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2473     return false;
2474   }
2475 
2476   // iterate over the array and report each reference to a
2477   // non-null element
2478   for (int index=0; index<array->length(); index++) {
2479     oop elem = array->obj_at(index);
2480     if (elem == NULL) {
2481       continue;
2482     }
2483 
2484     // report the array reference o[index] = elem
2485     if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2486       return false;
2487     }
2488   }
2489   return true;
2490 }
2491 
2492 // a type array references its class
2493 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
2494   Klass* k = o->klass();
2495   oop mirror = k->java_mirror();
2496   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2497     return false;
2498   }
2499 
2500   // report the array contents if required
2501   if (is_reporting_primitive_array_values()) {
2502     if (!CallbackInvoker::report_primitive_array_values(o)) {
2503       return false;
2504     }
2505   }
2506   return true;
2507 }
2508 
#ifdef ASSERT
// verify that a static oop field lies within the static-field area
// of the given class mirror
static inline bool verify_static_oop(InstanceKlass* ik,
                                     oop mirror, int offset) {
  address obj_p = cast_from_oop<address>(mirror) + offset;
  address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
  address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
  assert(end >= start, "sanity check");
  return (obj_p >= start) && (obj_p < end);
}
#endif // #ifdef ASSERT
2525 
// a class references its super class, interfaces, class loader, ...
// and finally its static fields
inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
  // loop index reused by the interface and static-field loops below
  int i;
  Klass* klass = java_lang_Class::as_Klass(java_class);

  if (klass->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(klass);

    // Ignore the class if it hasn't been initialized yet
    // (checked via is_linked(): linking precedes initialization)
    if (!ik->is_linked()) {
      return true;
    }

    // get the java mirror
    oop mirror = klass->java_mirror();

    // super (only if something more interesting than java.lang.Object)
    InstanceKlass* java_super = ik->java_super();
    if (java_super != NULL && java_super != vmClasses::Object_klass()) {
      oop super = java_super->java_mirror();
      if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
        return false;
      }
    }

    // class loader
    oop cl = ik->class_loader();
    if (cl != NULL) {
      if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
        return false;
      }
    }

    // protection domain
    oop pd = ik->protection_domain();
    if (pd != NULL) {
      if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
        return false;
      }
    }

    // signers
    oop signers = ik->signers();
    if (signers != NULL) {
      if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
        return false;
      }
    }

    // references from the constant pool
    // (only resolved String and Class entries are reported; index 0 of
    //  the constant pool is unused, hence the loop starts at 1)
    {
      ConstantPool* pool = ik->constants();
      for (int i = 1; i < pool->length(); i++) {  // note: shadows the outer 'i'
        constantTag tag = pool->tag_at(i).value();
        if (tag.is_string() || tag.is_klass() || tag.is_unresolved_klass()) {
          oop entry;
          if (tag.is_string()) {
            entry = pool->resolved_string_at(i);
            // If the entry is non-null it is resolved.
            if (entry == NULL) {
              continue;
            }
          } else if (tag.is_klass()) {
            entry = pool->resolved_klass_at(i)->java_mirror();
          } else {
            // Code generated by JIT compilers might not resolve constant
            // pool entries.  Treat them as resolved if they are loaded.
            assert(tag.is_unresolved_klass(), "must be");
            constantPoolHandle cp(Thread::current(), pool);
            // note: this 'klass' shadows the outer one; it is the class
            // named by this constant pool entry, not the class being walked
            Klass* klass = ConstantPool::klass_at_if_loaded(cp, i);
            if (klass == NULL) {
              continue;
            }
            entry = klass->java_mirror();
          }
          if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
            return false;
          }
        }
      }
    }

    // interfaces
    // (These will already have been reported as references from the constant pool
    //  but are specified by IterateOverReachableObjects and must be reported).
    Array<InstanceKlass*>* interfaces = ik->local_interfaces();
    for (i = 0; i < interfaces->length(); i++) {
      oop interf = interfaces->at(i)->java_mirror();
      if (interf == NULL) {
        continue;
      }
      if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
        return false;
      }
    }

    // iterate over the static fields
    // (field_map is heap-allocated, so it must be deleted on every exit path)
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
    for (i=0; i<field_map->field_count(); i++) {
      ClassFieldDescriptor* field = field_map->field_at(i);
      char type = field->field_type();
      if (!is_primitive_field_type(type)) {
        // oop static field: report as a static field reference
        oop fld_o = mirror->obj_field(field->field_offset());
        assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
        if (fld_o != NULL) {
          int slot = field->field_index();
          if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
            delete field_map;
            return false;
          }
        }
      } else {
         // primitive static field: reported only when requested
         if (is_reporting_primitive_fields()) {
           address addr = cast_from_oop<address>(mirror) + field->field_offset();
           int slot = field->field_index();
           if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
             delete field_map;
             return false;
          }
        }
      }
    }
    delete field_map;

    return true;
  }

  // non-instance klass (e.g. mirror of an array class): nothing to report here
  return true;
}
2657 
// an object references a class and its instance fields
// (static fields are ignored here as we report these as
// references from the class).
inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
  // reference to the class
  if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
    return false;
  }

  // iterate over instance fields
  ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      // AS_NO_KEEPALIVE: load the referent without keeping it alive;
      // ON_UNKNOWN_OOP_REF: the field may hold a weak reference
      // (NOTE(review): semantics inferred from decorator names -- confirm
      // against the oops/access API documentation)
      oop fld_o = o->obj_field_access<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>(field->field_offset());
      // ignore any objects that aren't visible to profiler
      if (fld_o != NULL) {
        assert(Universe::heap()->is_in(fld_o), "unsafe code should not "
               "have references to Klass* anymore");
        int slot = field->field_index();
        if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
          return false;
        }
      }
    } else {
      if (is_reporting_primitive_fields()) {
        // primitive instance field
        address addr = cast_from_oop<address>(o) + field->field_offset();
        int slot = field->field_index();
        if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
          return false;
        }
      }
    }
  }

  // if the object is a java.lang.String
  if (is_reporting_string_values() &&
      o->klass() == vmClasses::String_klass()) {
    if (!CallbackInvoker::report_string_value(o)) {
      return false;
    }
  }
  return true;
}
2704 
2705 
2706 // Collects all simple (non-stack) roots except for threads;
2707 // threads are handled in collect_stack_roots() as an optimization.
2708 // if there's a heap root callback provided then the callback is
2709 // invoked for each simple root.
2710 // if an object reference callback is provided then all simple
2711 // roots are pushed onto the marking stack so that they can be
2712 // processed later
2713 //
2714 inline bool VM_HeapWalkOperation::collect_simple_roots() {
2715   SimpleRootsClosure blk;
2716 
2717   // JNI globals
2718   blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
2719   JNIHandles::oops_do(&blk);
2720   if (blk.stopped()) {
2721     return false;
2722   }
2723 
2724   // Preloaded classes and loader from the system dictionary
2725   blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
2726   CLDToOopClosure cld_closure(&blk, false);
2727   ClassLoaderDataGraph::always_strong_cld_do(&cld_closure);
2728   if (blk.stopped()) {
2729     return false;
2730   }
2731 
2732   // threads are now handled in collect_stack_roots()
2733 
2734   // Other kinds of roots maintained by HotSpot
2735   // Many of these won't be visible but others (such as instances of important
2736   // exceptions) will be visible.
2737   blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
2738   Universe::vm_global()->oops_do(&blk);
2739   if (blk.stopped()) {
2740     return false;
2741   }
2742 
2743   return true;
2744 }
2745 
// Walk the stack of a given thread and find all references (locals
// and JNI calls) and report these as stack references
inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
                                                      JNILocalRootsClosure* blk)
{
  oop threadObj = java_thread->threadObj();
  assert(threadObj != NULL, "sanity check");

  // only need to get the thread's tag once per thread
  jlong thread_tag = tag_for(_tag_map, threadObj);

  // also need the thread id
  jlong tid = java_lang_Thread::thread_id(threadObj);


  if (java_thread->has_last_Java_frame()) {

    // vframes are resource allocated
    Thread* current_thread = Thread::current();
    ResourceMark rm(current_thread);
    HandleMark hm(current_thread);

    RegisterMap reg_map(java_thread);
    frame f = java_thread->last_frame();
    vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);

    bool is_top_frame = true;
    int depth = 0;
    // remembers the most recent entry frame so its JNI locals can be
    // attributed to the java frame that follows it in the walk
    frame* last_entry_frame = NULL;

    while (vf != NULL) {
      if (vf->is_java_frame()) {

        // java frame (interpreted, compiled, ...)
        javaVFrame *jvf = javaVFrame::cast(vf);

        // the jmethodID
        jmethodID method = jvf->method()->jmethod_id();

        if (!(jvf->method()->is_native())) {
          jlocation bci = (jlocation)jvf->bci();
          // report each object-typed local variable as a stack reference
          StackValueCollection* locals = jvf->locals();
          for (int slot=0; slot<locals->size(); slot++) {
            if (locals->at(slot)->type() == T_OBJECT) {
              oop o = locals->obj_at(slot)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                   bci, slot, o)) {
                return false;
              }
            }
          }

          // expression stack entries are reported with slot numbers that
          // continue after the locals (locals->size() + index)
          StackValueCollection* exprs = jvf->expressions();
          for (int index=0; index < exprs->size(); index++) {
            if (exprs->at(index)->type() == T_OBJECT) {
              oop o = exprs->obj_at(index)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                   bci, locals->size() + index, o)) {
                return false;
              }
            }
          }

          // Follow oops from compiled nmethod
          if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) {
            blk->set_context(thread_tag, tid, depth, method);
            jvf->cb()->as_nmethod()->oops_do(blk);
          }
        } else {
          // native frame: report its JNI local references
          blk->set_context(thread_tag, tid, depth, method);
          if (is_top_frame) {
            // JNI locals for the top frame.
            java_thread->active_handles()->oops_do(blk);
          } else {
            if (last_entry_frame != NULL) {
              // JNI locals for the entry frame
              assert(last_entry_frame->is_entry_frame(), "checking");
              last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
            }
          }
        }
        last_entry_frame = NULL;
        depth++;
      } else {
        // externalVFrame - for an entry frame then we report the JNI locals
        // when we find the corresponding javaVFrame
        frame* fr = vf->frame_pointer();
        assert(fr != NULL, "sanity check");
        if (fr->is_entry_frame()) {
          last_entry_frame = fr;
        }
      }

      vf = vf->sender();
      is_top_frame = false;
    }
  } else {
    // no last java frame but there may be JNI locals
    blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
    java_thread->active_handles()->oops_do(blk);
  }
  return true;
}
2859 
2860 
2861 // Collects the simple roots for all threads and collects all
2862 // stack roots - for each thread it walks the execution
2863 // stack to find all references and local JNI refs.
2864 inline bool VM_HeapWalkOperation::collect_stack_roots() {
2865   JNILocalRootsClosure blk;
2866   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
2867     oop threadObj = thread->threadObj();
2868     if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
2869       // Collect the simple root for this thread before we
2870       // collect its stack roots
2871       if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
2872                                                threadObj)) {
2873         return false;
2874       }
2875       if (!collect_stack_roots(thread, &blk)) {
2876         return false;
2877       }
2878     }
2879   }
2880   return true;
2881 }
2882 
2883 // visit an object
2884 // first mark the object as visited
2885 // second get all the outbound references from this object (in other words, all
2886 // the objects referenced by this object).
2887 //
2888 bool VM_HeapWalkOperation::visit(oop o) {
2889   // mark object as visited
2890   assert(!ObjectMarker::visited(o), "can't visit same object more than once");
2891   ObjectMarker::mark(o);
2892 
2893   // instance
2894   if (o->is_instance()) {
2895     if (o->klass() == vmClasses::Class_klass()) {
2896       if (!java_lang_Class::is_primitive(o)) {
2897         // a java.lang.Class
2898         return iterate_over_class(o);
2899       }
2900     } else {
2901       return iterate_over_object(o);
2902     }
2903   }
2904 
2905   // object array
2906   if (o->is_objArray()) {
2907     return iterate_over_array(o);
2908   }
2909 
2910   // type array
2911   if (o->is_typeArray()) {
2912     return iterate_over_type_array(o);
2913   }
2914 
2915   return true;
2916 }
2917 
// VM operation entry point: runs the heap walk (executed by the VM thread).
void VM_HeapWalkOperation::doit() {
  ResourceMark rm;
  // scopes the object mark bits and the cached class field maps to this walk
  ObjectMarkerController marker;
  ClassFieldMapCacheMark cm;

  JvmtiTagMap::check_hashmaps_for_heapwalk();

  assert(visit_stack()->is_empty(), "visit stack must be empty");

  // the heap walk starts with an initial object or the heap roots
  if (initial_object().is_null()) {
    // If either collect_stack_roots() or collect_simple_roots()
    // returns false at this point, then there are no mark bits
    // to reset.
    ObjectMarker::set_needs_reset(false);

    // Calling collect_stack_roots() before collect_simple_roots()
    // can result in a big performance boost for an agent that is
    // focused on analyzing references in the thread stacks.
    if (!collect_stack_roots()) return;

    if (!collect_simple_roots()) return;

    // no early return so enable heap traversal to reset the mark bits
    ObjectMarker::set_needs_reset(true);
  } else {
    visit_stack()->push(initial_object()());
  }

  // object references required
  if (is_following_references()) {

    // visit each object until all reachable objects have been
    // visited or the callback asked to terminate the iteration.
    while (!visit_stack()->is_empty()) {
      // visit() pushes the object's references onto the visit stack
      oop o = visit_stack()->pop();
      if (!ObjectMarker::visited(o)) {
        if (!visit(o)) {
          break;
        }
      }
    }
  }
}
2962 
2963 // iterate over all objects that are reachable from a set of roots
2964 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
2965                                                  jvmtiStackReferenceCallback stack_ref_callback,
2966                                                  jvmtiObjectReferenceCallback object_ref_callback,
2967                                                  const void* user_data) {
2968   JavaThread* jt = JavaThread::current();
2969   EscapeBarrier eb(true, jt);
2970   eb.deoptimize_objects_all_threads();
2971   MutexLocker ml(Heap_lock);
2972   BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
2973   VM_HeapWalkOperation op(this, Handle(), context, user_data);
2974   VMThread::execute(&op);
2975 }
2976 
2977 // iterate over all objects that are reachable from a given object
2978 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
2979                                                              jvmtiObjectReferenceCallback object_ref_callback,
2980                                                              const void* user_data) {
2981   oop obj = JNIHandles::resolve(object);
2982   Handle initial_object(Thread::current(), obj);
2983 
2984   MutexLocker ml(Heap_lock);
2985   BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
2986   VM_HeapWalkOperation op(this, initial_object, context, user_data);
2987   VMThread::execute(&op);
2988 }
2989 
2990 // follow references from an initial object or the GC roots
2991 void JvmtiTagMap::follow_references(jint heap_filter,
2992                                     Klass* klass,
2993                                     jobject object,
2994                                     const jvmtiHeapCallbacks* callbacks,
2995                                     const void* user_data)
2996 {
2997   oop obj = JNIHandles::resolve(object);
2998   JavaThread* jt = JavaThread::current();
2999   Handle initial_object(jt, obj);
3000   // EA based optimizations that are tagged or reachable from initial_object are already reverted.
3001   EscapeBarrier eb(initial_object.is_null() &&
3002                    !(heap_filter & JVMTI_HEAP_FILTER_UNTAGGED),
3003                    jt);
3004   eb.deoptimize_objects_all_threads();
3005   MutexLocker ml(Heap_lock);
3006   AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
3007   VM_HeapWalkOperation op(this, initial_object, context, user_data);
3008   VMThread::execute(&op);
3009 }
3010 
3011 // Concurrent GC needs to call this in relocation pause, so after the objects are moved
3012 // and have their new addresses, the table can be rehashed.
3013 void JvmtiTagMap::set_needs_rehashing() {
3014   assert(SafepointSynchronize::is_at_safepoint(), "called in gc pause");
3015   assert(Thread::current()->is_VM_thread(), "should be the VM thread");
3016 
3017   JvmtiEnvIterator it;
3018   for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
3019     JvmtiTagMap* tag_map = env->tag_map_acquire();
3020     if (tag_map != NULL) {
3021       tag_map->_needs_rehashing = true;
3022     }
3023   }
3024 }
3025 
// Verify gc_notification follows set_needs_cleaning.
// (debug-only flag: set in set_needs_cleaning, checked/cleared in gc_notification)
DEBUG_ONLY(static bool notified_needs_cleaning = false;)
3028 
3029 void JvmtiTagMap::set_needs_cleaning() {
3030   assert(SafepointSynchronize::is_at_safepoint(), "called in gc pause");
3031   assert(Thread::current()->is_VM_thread(), "should be the VM thread");
3032   // Can't assert !notified_needs_cleaning; a partial GC might be upgraded
3033   // to a full GC and do this twice without intervening gc_notification.
3034   DEBUG_ONLY(notified_needs_cleaning = true;)
3035 
3036   JvmtiEnvIterator it;
3037   for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
3038     JvmtiTagMap* tag_map = env->tag_map_acquire();
3039     if (tag_map != NULL) {
3040       tag_map->_needs_cleaning = !tag_map->is_empty();
3041     }
3042   }
3043 }
3044 
3045 void JvmtiTagMap::gc_notification(size_t num_dead_entries) {
3046   assert(notified_needs_cleaning, "missing GC notification");
3047   DEBUG_ONLY(notified_needs_cleaning = false;)
3048 
3049   // Notify ServiceThread if there's work to do.
3050   {
3051     MonitorLocker ml(Service_lock, Mutex::_no_safepoint_check_flag);
3052     _has_object_free_events = (num_dead_entries != 0);
3053     if (_has_object_free_events) ml.notify_all();
3054   }
3055 
3056   // If no dead entries then cancel cleaning requests.
3057   if (num_dead_entries == 0) {
3058     JvmtiEnvIterator it;
3059     for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
3060       JvmtiTagMap* tag_map = env->tag_map_acquire();
3061       if (tag_map != NULL) {
3062         MutexLocker ml (tag_map->lock(), Mutex::_no_safepoint_check_flag);
3063         tag_map->_needs_cleaning = false;
3064       }
3065     }
3066   }
3067 }
3068 
3069 // Used by ServiceThread to discover there is work to do.
3070 bool JvmtiTagMap::has_object_free_events_and_reset() {
3071   assert_lock_strong(Service_lock);
3072   bool result = _has_object_free_events;
3073   _has_object_free_events = false;
3074   return result;
3075 }
3076 
3077 // Used by ServiceThread to clean up tagmaps.
3078 void JvmtiTagMap::flush_all_object_free_events() {
3079   JavaThread* thread = JavaThread::current();
3080   JvmtiEnvIterator it;
3081   for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
3082     JvmtiTagMap* tag_map = env->tag_map_acquire();
3083     if (tag_map != NULL) {
3084       tag_map->flush_object_free_events();
3085       ThreadBlockInVM tbiv(thread); // Be safepoint-polite while looping.
3086     }
3087   }
3088 }