1 /*
   2  * Copyright (c) 2017, 2025, Red Hat, Inc. All rights reserved.
   3  * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
   4  * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
   5  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   6  *
   7  * This code is free software; you can redistribute it and/or modify it
   8  * under the terms of the GNU General Public License version 2 only, as
   9  * published by the Free Software Foundation.
  10  *
  11  * This code is distributed in the hope that it will be useful, but WITHOUT
  12  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  13  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  14  * version 2 for more details (a copy is included in the LICENSE file that
  15  * accompanied this code).
  16  *
  17  * You should have received a copy of the GNU General Public License version
  18  * 2 along with this work; if not, write to the Free Software Foundation,
  19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  20  *
  21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  22  * or visit www.oracle.com if you need additional information or have any
  23  * questions.
  24  *
  25  */
  26 
  27 #include "gc/shared/tlab_globals.hpp"
  28 #include "gc/shenandoah/shenandoahAsserts.hpp"
  29 #include "gc/shenandoah/shenandoahForwarding.inline.hpp"
  30 #include "gc/shenandoah/shenandoahGeneration.hpp"
  31 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
  32 #include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
  33 #include "gc/shenandoah/shenandoahOldGeneration.hpp"
  34 #include "gc/shenandoah/shenandoahPhaseTimings.hpp"
  35 #include "gc/shenandoah/shenandoahRootProcessor.hpp"
  36 #include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
  37 #include "gc/shenandoah/shenandoahTaskqueue.inline.hpp"
  38 #include "gc/shenandoah/shenandoahUtils.hpp"
  39 #include "gc/shenandoah/shenandoahVerifier.hpp"
  40 #include "gc/shenandoah/shenandoahYoungGeneration.hpp"
  41 #include "memory/allocation.hpp"
  42 #include "memory/iterator.inline.hpp"
  43 #include "memory/resourceArea.hpp"
  44 #include "oops/compressedOops.inline.hpp"
  45 #include "runtime/atomic.hpp"
  46 #include "runtime/orderAccess.hpp"
  47 #include "runtime/threads.hpp"
  48 #include "utilities/align.hpp"
  49 
  50 // Avoid name collision on verify_oop (defined in macroAssembler_arm.hpp)
  51 #ifdef verify_oop
  52 #undef verify_oop
  53 #endif
  54 
  55 static bool is_instance_ref_klass(Klass* k) {
  56   return k->is_instance_klass() && InstanceKlass::cast(k)->reference_type() != REF_NONE;
  57 }
  58 
  59 class ShenandoahVerifyOopClosure : public BasicOopIterateClosure {
  60 private:
  61   const char* _phase;
  62   ShenandoahVerifier::VerifyOptions _options;
  63   ShenandoahVerifierStack* _stack;
  64   ShenandoahHeap* _heap;
  65   MarkBitMap* _map;
  66   ShenandoahLivenessData* _ld;
  67   void* _interior_loc;
  68   oop _loc;
  69   ReferenceIterationMode _ref_mode;
  70   ShenandoahGeneration* _generation;
  71 
  72 public:
  73   ShenandoahVerifyOopClosure(ShenandoahGeneration* generation, ShenandoahVerifierStack* stack,
  74                              MarkBitMap* map, ShenandoahLivenessData* ld,
  75                              const char* phase, ShenandoahVerifier::VerifyOptions options) :
  76     _phase(phase),
  77     _options(options),
  78     _stack(stack),
  79     _heap(ShenandoahHeap::heap()),
  80     _map(map),
  81     _ld(ld),
  82     _interior_loc(nullptr),
  83     _loc(nullptr),
  84     _generation(generation) {
  85     if (options._verify_marked == ShenandoahVerifier::_verify_marked_complete_except_references ||
  86         options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty ||
  87         options._verify_marked == ShenandoahVerifier::_verify_marked_disable) {
  88       // Unknown status for Reference.referent field. Do not touch it, it might be dead.
  89       // Normally, barriers would prevent us from seeing the dead referents, but verifier
  90       // runs with barriers disabled.
  91       _ref_mode = DO_FIELDS_EXCEPT_REFERENT;
  92     } else {
  93       // Otherwise do all fields.
  94       _ref_mode = DO_FIELDS;
  95     }
  96   }
  97 
  98   ReferenceIterationMode reference_iteration_mode() override {
  99     return _ref_mode;
 100   }
 101 
 102 private:
 103   void check(ShenandoahAsserts::SafeLevel level, oop obj, bool test, const char* label) {
 104     if (!test) {
 105       ShenandoahAsserts::print_failure(level, obj, _interior_loc, _loc, _phase, label, __FILE__, __LINE__);
 106     }
 107   }
 108 
 109   template <class T>
 110   void do_oop_work(T* p) {
 111     T o = RawAccess<>::oop_load(p);
 112     if (!CompressedOops::is_null(o)) {
 113       // Basic verification should happen before we touch anything else.
 114       // For performance reasons, only fully verify non-marked field values.
 115       // We are here when the host object for *p is already marked.
 116       oop obj = CompressedOops::decode_raw_not_null(o);
 117       verify_oop_at_basic(p, obj);
 118 
 119       if (is_instance_ref_klass(ShenandoahForwarding::klass(obj))) {
 120         obj = ShenandoahForwarding::get_forwardee(obj);
 121       }
 122       if (in_generation(obj) && _map->par_mark(obj)) {
 123         verify_oop_at(p, obj);
 124         _stack->push(ShenandoahVerifierTask(obj));
 125       }
 126     }
 127   }
 128 
 129   bool in_generation(oop obj) const {
 130     ShenandoahHeapRegion* region = _heap->heap_region_containing(obj);
 131     return _generation->contains(region);
 132   }
 133 
 134   void verify_oop(oop obj, bool basic = false) {
 135     // Perform consistency checks with gradually decreasing safety level. This guarantees
 136     // that failure report would not try to touch something that was not yet verified to be
 137     // safe to process.
 138 
 139     check(ShenandoahAsserts::_safe_unknown, obj, _heap->is_in_reserved(obj),
 140               "oop must be in heap bounds");
 141     check(ShenandoahAsserts::_safe_unknown, obj, is_object_aligned(obj),
 142               "oop must be aligned");
 143     check(ShenandoahAsserts::_safe_unknown, obj, os::is_readable_pointer(obj),
 144               "oop must be accessible");
 145 
 146     ShenandoahHeapRegion *obj_reg = _heap->heap_region_containing(obj);
 147 
 148     narrowKlass nk = 0;
 149     const Klass* obj_klass = nullptr;
 150     const bool klass_valid = ShenandoahAsserts::extract_klass_safely(obj, nk, obj_klass);
 151 
 152     check(ShenandoahAsserts::_safe_unknown, obj, klass_valid,
 153            "Object klass pointer unreadable or invalid");
 154 
 155     // Verify that obj is not in dead space:
 156     {
 157       // Do this before touching obj->size()
 158       check(ShenandoahAsserts::_safe_unknown, obj, Metaspace::contains(obj_klass),
 159              "Object klass pointer must go to metaspace");
 160 
 161       HeapWord *obj_addr = cast_from_oop<HeapWord*>(obj);
 162       check(ShenandoahAsserts::_safe_unknown, obj, obj_addr < obj_reg->top(),
 163              "Object start should be within the region");
 164 
 165       if (!obj_reg->is_humongous()) {
 166         check(ShenandoahAsserts::_safe_unknown, obj, (obj_addr + ShenandoahForwarding::size(obj)) <= obj_reg->top(),
 167                "Object end should be within the region");
 168       } else {
 169         size_t humongous_start = obj_reg->index();
 170         size_t humongous_end = humongous_start + (ShenandoahForwarding::size(obj) >> ShenandoahHeapRegion::region_size_words_shift());
 171         for (size_t idx = humongous_start + 1; idx < humongous_end; idx++) {
 172           check(ShenandoahAsserts::_safe_unknown, obj, _heap->get_region(idx)->is_humongous_continuation(),
 173                  "Humongous object is in continuation that fits it");
 174         }
 175       }
 176 
 177       check(ShenandoahAsserts::_safe_unknown, obj, obj_reg->is_active(),
 178            "Object should be in active region");
 179 
 180       // ------------ obj is safe at this point --------------
 181 
 182       if (basic) {
 183         return;
 184       }
 185 
 186       switch (_options._verify_liveness) {
 187         case ShenandoahVerifier::_verify_liveness_disable:
 188           // skip
 189           break;
 190         case ShenandoahVerifier::_verify_liveness_complete:
 191           _ld[obj_reg->index()].add_then_fetch((uint) ShenandoahForwarding::size(obj), memory_order_relaxed);
 192           // fallthrough for fast failure for un-live regions:
 193         case ShenandoahVerifier::_verify_liveness_conservative:
 194           check(ShenandoahAsserts::_safe_oop, obj, obj_reg->has_live() ||
 195                 (obj_reg->is_old() && _generation->is_young()),
 196                    "Object must belong to region with live data");
 197           break;
 198         default:
 199           assert(false, "Unhandled liveness verification");
 200       }
 201     }
 202 
 203     oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
 204 
 205     ShenandoahHeapRegion* fwd_reg = nullptr;
 206 
 207     if (obj != fwd) {
 208       check(ShenandoahAsserts::_safe_oop, obj, _heap->is_in_reserved(fwd),
 209              "Forwardee must be in heap bounds");
 210       check(ShenandoahAsserts::_safe_oop, obj, !CompressedOops::is_null(fwd),
 211              "Forwardee is set");
 212       check(ShenandoahAsserts::_safe_oop, obj, is_object_aligned(fwd),
 213              "Forwardee must be aligned");
 214 
 215       // Do this before touching fwd->size()
 216       Klass* fwd_klass = fwd->klass_or_null();
 217       check(ShenandoahAsserts::_safe_oop, obj, fwd_klass != nullptr,
 218              "Forwardee klass pointer should not be null");
 219       check(ShenandoahAsserts::_safe_oop, obj, Metaspace::contains(fwd_klass),
 220              "Forwardee klass pointer must go to metaspace");
 221       check(ShenandoahAsserts::_safe_oop, obj, obj_klass == fwd_klass,
 222              "Forwardee klass pointer must go to metaspace");
 223 
 224       fwd_reg = _heap->heap_region_containing(fwd);
 225 
 226       check(ShenandoahAsserts::_safe_oop, obj, fwd_reg->is_active(),
 227             "Forwardee should be in active region");
 228 
 229       // Verify that forwardee is not in the dead space:
 230       check(ShenandoahAsserts::_safe_oop, obj, !fwd_reg->is_humongous(),
 231              "Should have no humongous forwardees");
 232 
 233       HeapWord *fwd_addr = cast_from_oop<HeapWord *>(fwd);
 234       check(ShenandoahAsserts::_safe_oop, obj, fwd_addr < fwd_reg->top(),
 235              "Forwardee start should be within the region");
 236       check(ShenandoahAsserts::_safe_oop, obj, (fwd_addr + ShenandoahForwarding::size(fwd)) <= fwd_reg->top(),
 237              "Forwardee end should be within the region");
 238 
 239       oop fwd2 = ShenandoahForwarding::get_forwardee_raw_unchecked(fwd);
 240       check(ShenandoahAsserts::_safe_oop, obj, (fwd == fwd2),
 241              "Double forwarding");
 242     } else {
 243       fwd_reg = obj_reg;
 244     }
 245 
 246     // Do additional checks for special objects: their fields can hold metadata as well.
 247     // We want to check class loading/unloading did not corrupt them. We can only reasonably
 248     // trust the forwarded objects, as the from-space object can have the klasses effectively
 249     // dead.
 250 
 251     if (obj_klass == vmClasses::Class_klass()) {
 252       const Metadata* klass = fwd->metadata_field(java_lang_Class::klass_offset());
 253       check(ShenandoahAsserts::_safe_oop, obj,
 254             klass == nullptr || Metaspace::contains(klass),
 255             "Mirrored instance class should point to Metaspace");
 256 
 257       const Metadata* array_klass = obj->metadata_field(java_lang_Class::array_klass_offset());
 258       check(ShenandoahAsserts::_safe_oop, obj,
 259             array_klass == nullptr || Metaspace::contains(array_klass),
 260             "Mirrored array class should point to Metaspace");
 261     }
 262 
 263     // ------------ obj and fwd are safe at this point --------------
 264     switch (_options._verify_marked) {
 265       case ShenandoahVerifier::_verify_marked_disable:
 266         // skip
 267         break;
 268       case ShenandoahVerifier::_verify_marked_incomplete:
 269         check(ShenandoahAsserts::_safe_all, obj, _heap->marking_context()->is_marked(obj),
 270                "Must be marked in incomplete bitmap");
 271         break;
 272       case ShenandoahVerifier::_verify_marked_complete:
 273         check(ShenandoahAsserts::_safe_all, obj, _generation->complete_marking_context()->is_marked(obj),
 274                "Must be marked in complete bitmap");
 275         break;
 276       case ShenandoahVerifier::_verify_marked_complete_except_references:
 277       case ShenandoahVerifier::_verify_marked_complete_satb_empty:
 278         check(ShenandoahAsserts::_safe_all, obj, _generation->complete_marking_context()->is_marked(obj),
 279               "Must be marked in complete bitmap, except j.l.r.Reference referents");
 280         break;
 281       default:
 282         assert(false, "Unhandled mark verification");
 283     }
 284 
 285     switch (_options._verify_forwarded) {
 286       case ShenandoahVerifier::_verify_forwarded_disable:
 287         // skip
 288         break;
 289       case ShenandoahVerifier::_verify_forwarded_none: {
 290         check(ShenandoahAsserts::_safe_all, obj, (obj == fwd),
 291                "Should not be forwarded");
 292         break;
 293       }
 294       case ShenandoahVerifier::_verify_forwarded_allow: {
 295         if (obj != fwd) {
 296           check(ShenandoahAsserts::_safe_all, obj, obj_reg != fwd_reg,
 297                  "Forwardee should be in another region");
 298         }
 299         break;
 300       }
 301       default:
 302         assert(false, "Unhandled forwarding verification");
 303     }
 304 
 305     switch (_options._verify_cset) {
 306       case ShenandoahVerifier::_verify_cset_disable:
 307         // skip
 308         break;
 309       case ShenandoahVerifier::_verify_cset_none:
 310         check(ShenandoahAsserts::_safe_all, obj, !_heap->in_collection_set(obj),
 311                "Should not have references to collection set");
 312         break;
 313       case ShenandoahVerifier::_verify_cset_forwarded:
 314         if (_heap->in_collection_set(obj)) {
 315           check(ShenandoahAsserts::_safe_all, obj, (obj != fwd),
 316                  "Object in collection set, should have forwardee");
 317         }
 318         break;
 319       default:
 320         assert(false, "Unhandled cset verification");
 321     }
 322 
 323   }
 324 
 325 public:
 326   /**
 327    * Verify object with known interior reference.
 328    * @param p interior reference where the object is referenced from; can be off-heap
 329    * @param obj verified object
 330    */
 331   template <class T>
 332   void verify_oop_at(T* p, oop obj) {
 333     _interior_loc = p;
 334     verify_oop(obj);
 335     _interior_loc = nullptr;
 336   }
 337 
 338   /**
 339    * Verify object with known interior reference, with only basic verification.
 340    * @param p interior reference where the object is referenced from; can be off-heap
 341    * @param obj verified object
 342    */
 343   template <class T>
 344   void verify_oop_at_basic(T* p, oop obj) {
 345     _interior_loc = p;
 346     verify_oop(obj, /* basic = */ true);
 347     _interior_loc = nullptr;
 348   }
 349 
 350   /**
 351    * Verify object without known interior reference.
 352    * Useful when picking up the object at known offset in heap,
 353    * but without knowing what objects reference it.
 354    * @param obj verified object
 355    */
 356   void verify_oop_standalone(oop obj) {
 357     _interior_loc = nullptr;
 358     verify_oop(obj);
 359     _interior_loc = nullptr;
 360   }
 361 
 362   /**
 363    * Verify oop fields from this object.
 364    * @param obj host object for verified fields
 365    */
 366   void verify_oops_from(oop obj) {
 367     _loc = obj;
 368     // oop_iterate() can not deal with forwarded objects, because
 369     // it needs to load klass(), which may be overridden by the
 370     // forwarding pointer.
 371     oop fwd = ShenandoahForwarding::get_forwardee_raw(obj);
 372     fwd->oop_iterate(this);
 373     _loc = nullptr;
 374   }
 375 
 376   void do_oop(oop* p) override { do_oop_work(p); }
 377   void do_oop(narrowOop* p) override { do_oop_work(p); }
 378 };
 379 
 380 // This closure computes the amounts of used, committed, and garbage memory and the number of regions contained within
 381 // a subset (e.g. the young generation or old generation) of the total heap.
 382 class ShenandoahCalculateRegionStatsClosure : public ShenandoahHeapRegionClosure {
 383 private:
 384   size_t _used, _committed, _garbage, _regions, _humongous_waste, _trashed_regions, _trashed_used;
 385   size_t _region_size_bytes, _min_free_size;
 386 public:
 387   ShenandoahCalculateRegionStatsClosure() :
 388      _used(0), _committed(0), _garbage(0), _regions(0), _humongous_waste(0), _trashed_regions(0), _trashed_used(0)
 389   {
 390     _region_size_bytes = ShenandoahHeapRegion::region_size_bytes();
 391     // Retired regions are not necessarily filled, thouugh their remnant memory is considered used.
 392     _min_free_size = PLAB::min_size() * HeapWordSize;
 393   };
 394 
 395   void heap_region_do(ShenandoahHeapRegion* r) override {
 396     if (r->is_cset() || r->is_trash()) {
 397       // Count the entire cset or trashed (formerly cset) region as used
 398       // Note: Immediate garbage trash regions were never in the cset.
 399       _used += _region_size_bytes;
 400       _garbage += _region_size_bytes - r->get_live_data_bytes();
 401       if (r->is_trash()) {
 402         _trashed_regions++;
 403         _trashed_used += _region_size_bytes;
 404       }
 405     } else {
 406       if (r->is_humongous()) {
 407         _used += _region_size_bytes;
 408         _garbage += _region_size_bytes - r->get_live_data_bytes();
 409         _humongous_waste += r->free();
 410       } else {
 411         size_t alloc_capacity = r->free();
 412         if (alloc_capacity < _min_free_size) {
 413           // this region has been retired already, count it as entirely consumed
 414           alloc_capacity = 0;
 415         }
 416         size_t bytes_used_in_region = _region_size_bytes - alloc_capacity;
 417         size_t bytes_garbage_in_region = bytes_used_in_region - r->get_live_data_bytes();
 418         size_t waste_bytes = r->free();
 419         _used += bytes_used_in_region;
 420         _garbage += bytes_garbage_in_region;
 421       }
 422     }
 423     _committed += r->is_committed() ? _region_size_bytes : 0;
 424     _regions++;
 425     log_debug(gc)("ShenandoahCalculateRegionStatsClosure: adding %zu for %s Region %zu, yielding: %zu",
 426             r->used(), (r->is_humongous() ? "humongous" : "regular"), r->index(), _used);
 427   }
 428 
 429   size_t used() const { return _used; }
 430   size_t used_after_recycle() const { return _used - _trashed_used; }
 431   size_t committed() const { return _committed; }
 432   size_t garbage() const { return _garbage; }
 433   size_t regions() const { return _regions; }
 434   size_t trashed_regions() const { return _trashed_regions; }
 435   size_t waste() const { return _humongous_waste; }
 436 
 437   // span is the total memory affiliated with these stats (some of which is in use and other is available)
 438   size_t span() const { return _regions * ShenandoahHeapRegion::region_size_bytes(); }
 439   size_t non_trashed_span() const {
 440     assert(_regions >= _trashed_regions, "sanity");
 441     return (_regions - _trashed_regions) * ShenandoahHeapRegion::region_size_bytes();
 442   }
 443   size_t non_trashed_committed() const {
 444     assert(_committed >= _trashed_regions * ShenandoahHeapRegion::region_size_bytes(), "sanity");
 445     return _committed - (_trashed_regions * ShenandoahHeapRegion::region_size_bytes());
 446   }
 447 };
 448 
// Accumulates region statistics per generation: every young region is added to
// the young and global accumulators, every old region to the old and global
// accumulators; FREE regions are not counted at all.
class ShenandoahGenerationStatsClosure : public ShenandoahHeapRegionClosure {
 public:
  ShenandoahCalculateRegionStatsClosure _old;
  ShenandoahCalculateRegionStatsClosure _young;
  ShenandoahCalculateRegionStatsClosure _global;

  void heap_region_do(ShenandoahHeapRegion* r) override {
    switch (r->affiliation()) {
      case FREE:
        // Unaffiliated regions do not count toward any generation.
        return;
      case YOUNG_GENERATION:
        _young.heap_region_do(r);
        _global.heap_region_do(r);
        break;
      case OLD_GENERATION:
        _old.heap_region_do(r);
        _global.heap_region_do(r);
        break;
      default:
        ShouldNotReachHere();
    }
  }

  // Log the generation's recorded usage next to the usage recomputed by `stats`.
  static void log_usage(ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
    log_debug(gc)("Safepoint verification: %s verified usage: %zu%s, recorded usage: %zu%s",
                  generation->name(),
                  byte_size_in_proper_unit(generation->used()), proper_unit_for_byte_size(generation->used()),
                  byte_size_in_proper_unit(stats.used()),       proper_unit_for_byte_size(stats.used()));
  }

  // Cross-check the generation's recorded accounting (used bytes, used regions,
  // capacity span, humongous waste) against the values recomputed from the
  // region stats, failing with `guarantee` on any mismatch.
  // When adjust_for_trash is set, trashed regions are excluded from the
  // recomputed totals (they are about to be recycled).
  // NOTE(review): adjust_for_padding is not consulted anywhere in this body —
  // confirm whether padding adjustment is handled by callers or still TODO.
  static void validate_usage(const bool adjust_for_padding, const bool adjust_for_trash,
                             const char* label, ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
    ShenandoahHeap* heap = ShenandoahHeap::heap();
    size_t generation_used = generation->used();
    size_t generation_used_regions = generation->used_regions();

    size_t stats_used = adjust_for_trash? stats.used_after_recycle(): stats.used();
    guarantee(stats_used == generation_used,
              "%s: generation (%s) used size must be consistent: generation-used: " PROPERFMT ", regions-used: " PROPERFMT,
              label, generation->name(), PROPERFMTARGS(generation_used), PROPERFMTARGS(stats_used));

    size_t stats_regions = adjust_for_trash? stats.regions() - stats.trashed_regions(): stats.regions();
    guarantee(stats_regions == generation_used_regions,
              "%s: generation (%s) used regions (%zu) must equal regions that are in use (%zu)%s",
              label, generation->name(), generation->used_regions(), stats_regions,
              adjust_for_trash? " (after adjusting for trash)": "");

    size_t generation_capacity = generation->max_capacity();
    guarantee(stats.non_trashed_span() <= generation_capacity,
              "%s: generation (%s) size spanned by regions (%zu) * region size (" PROPERFMT
              ") must not exceed current capacity (" PROPERFMT ")",
              label, generation->name(), stats.regions(), PROPERFMTARGS(ShenandoahHeapRegion::region_size_bytes()),
              PROPERFMTARGS(generation_capacity));

    size_t humongous_waste = generation->get_humongous_waste();
    guarantee(stats.waste() == humongous_waste,
              "%s: generation (%s) humongous waste must be consistent: generation: " PROPERFMT ", regions: " PROPERFMT,
              label, generation->name(), PROPERFMTARGS(humongous_waste), PROPERFMTARGS(stats.waste()));
  }
};
 509 
 510 class ShenandoahVerifyHeapRegionClosure : public ShenandoahHeapRegionClosure {
 511 private:
 512   ShenandoahHeap* _heap;
 513   const char* _phase;
 514   ShenandoahVerifier::VerifyRegions _regions;
 515   public:
 516   ShenandoahVerifyHeapRegionClosure(const char* phase, ShenandoahVerifier::VerifyRegions regions) :
 517       _heap(ShenandoahHeap::heap()),
 518       _phase(phase),
 519       _regions(regions) {};
 520 
 521   void print_failure(ShenandoahHeapRegion* r, const char* label) {
 522     ResourceMark rm;
 523 
 524     ShenandoahMessageBuffer msg("Shenandoah verification failed; %s: %s\n\n", _phase, label);
 525 
 526     stringStream ss;
 527     r->print_on(&ss);
 528     msg.append("%s", ss.as_string());
 529 
 530     report_vm_error(__FILE__, __LINE__, msg.buffer());
 531   }
 532 
 533   void verify(ShenandoahHeapRegion* r, bool test, const char* msg) {
 534     if (!test) {
 535       print_failure(r, msg);
 536     }
 537   }
 538 
 539   void heap_region_do(ShenandoahHeapRegion* r) override {
 540     switch (_regions) {
 541       case ShenandoahVerifier::_verify_regions_disable:
 542         break;
 543       case ShenandoahVerifier::_verify_regions_notrash:
 544         verify(r, !r->is_trash(),
 545                "Should not have trash regions");
 546         break;
 547       case ShenandoahVerifier::_verify_regions_nocset:
 548         verify(r, !r->is_cset(),
 549                "Should not have cset regions");
 550         break;
 551       case ShenandoahVerifier::_verify_regions_notrash_nocset:
 552         verify(r, !r->is_trash(),
 553                "Should not have trash regions");
 554         verify(r, !r->is_cset(),
 555                "Should not have cset regions");
 556         break;
 557       default:
 558         ShouldNotReachHere();
 559     }
 560 
 561     verify(r, r->capacity() == ShenandoahHeapRegion::region_size_bytes(),
 562            "Capacity should match region size");
 563 
 564     verify(r, r->bottom() <= r->top(),
 565            "Region top should not be less than bottom");
 566 
 567     verify(r, r->bottom() <= _heap->marking_context()->top_at_mark_start(r),
 568            "Region TAMS should not be less than bottom");
 569 
 570     verify(r, _heap->marking_context()->top_at_mark_start(r) <= r->top(),
 571            "Complete TAMS should not be larger than top");
 572 
 573     verify(r, r->get_live_data_bytes() <= r->capacity(),
 574            "Live data cannot be larger than capacity");
 575 
 576     verify(r, r->garbage() <= r->capacity(),
 577            "Garbage cannot be larger than capacity");
 578 
 579     verify(r, r->used() <= r->capacity(),
 580            "Used cannot be larger than capacity");
 581 
 582     verify(r, r->get_shared_allocs() <= r->capacity(),
 583            "Shared alloc count should not be larger than capacity");
 584 
 585     verify(r, r->get_tlab_allocs() <= r->capacity(),
 586            "TLAB alloc count should not be larger than capacity");
 587 
 588     verify(r, r->get_gclab_allocs() <= r->capacity(),
 589            "GCLAB alloc count should not be larger than capacity");
 590 
 591     verify(r, r->get_plab_allocs() <= r->capacity(),
 592            "PLAB alloc count should not be larger than capacity");
 593 
 594     verify(r, r->get_shared_allocs() + r->get_tlab_allocs() + r->get_gclab_allocs() + r->get_plab_allocs() == r->used(),
 595            "Accurate accounting: shared + TLAB + GCLAB + PLAB = used");
 596 
 597     verify(r, !r->is_empty() || !r->has_live(),
 598            "Empty regions should not have live data");
 599 
 600     verify(r, r->is_cset() == _heap->collection_set()->is_in(r),
 601            "Transitional: region flags and collection set agree");
 602   }
 603 };
 604 
// Parallel task that verifies all objects reachable from the roots.
// At ShenandoahVerifyLevel == 2 only worker 0 scans the roots; at level >= 3
// every worker scans the roots and then transitively verifies the objects it
// discovers via its local stack.
class ShenandoahVerifierReachableTask : public WorkerTask {
private:
  const char* _label;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahHeap* _heap;
  ShenandoahLivenessData* _ld;
  MarkBitMap* _bitmap;
  Atomic<size_t> _processed;   // total objects verified, summed across workers
  ShenandoahGeneration* _generation;

public:
  ShenandoahVerifierReachableTask(ShenandoahGeneration* generation,
                                  MarkBitMap* bitmap,
                                  ShenandoahLivenessData* ld,
                                  const char* label,
                                  ShenandoahVerifier::VerifyOptions options) :
    WorkerTask("Shenandoah Verifier Reachable Objects"),
    _label(label),
    _options(options),
    _heap(ShenandoahHeap::heap()),
    _ld(ld),
    _bitmap(bitmap),
    _processed(0),
    _generation(generation) {};

  // Number of objects verified; meaningful once all workers have finished.
  size_t processed() const {
    return _processed.load_relaxed();
  }

  void work(uint worker_id) override {
    ResourceMark rm;
    ShenandoahVerifierStack stack;

    // On level 2, we need to only check the roots once.
    // On level 3, we want to check the roots, and seed the local stack.
    // It is a lesser evil to accept multiple root scans at level 3, because
    // extended parallelism would buy us out.
    if (((ShenandoahVerifyLevel == 2) && (worker_id == 0))
        || (ShenandoahVerifyLevel >= 3)) {
        ShenandoahVerifyOopClosure cl(_generation, &stack, _bitmap, _ld,
                                      ShenandoahMessageBuffer("%s, Roots", _label),
                                      _options);
        if (_heap->unload_classes()) {
          // Class unloading in progress: only strong roots are reliable.
          ShenandoahRootVerifier::strong_roots_do(&cl, _generation);
        } else {
          ShenandoahRootVerifier::roots_do(&cl, _generation);
        }
    }

    size_t processed = 0;

    if (ShenandoahVerifyLevel >= 3) {
      // Drain the local stack: transitively verify everything reachable from
      // the roots this worker scanned above.
      ShenandoahVerifyOopClosure cl(_generation, &stack, _bitmap, _ld,
                                    ShenandoahMessageBuffer("%s, Reachable", _label),
                                    _options);
      while (!stack.is_empty()) {
        processed++;
        ShenandoahVerifierTask task = stack.pop();
        cl.verify_oops_from(task.obj());
      }
    }

    // Publish this worker's count; relaxed is fine, readers aggregate after join.
    _processed.add_then_fetch(processed, memory_order_relaxed);
  }
};
 670 
 671 class ShenandoahVerifyNoIncompleteSatbBuffers : public ThreadClosure {
 672   public:
 673   void do_thread(Thread* thread) override {
 674     SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
 675     if (!queue.is_empty()) {
 676       fatal("All SATB buffers should have been flushed during mark");
 677     }
 678   }
 679 };
 680 
 681 class ShenandoahVerifierMarkedRegionTask : public WorkerTask {
 682   private:
 683   const char* _label;
 684   ShenandoahVerifier::VerifyOptions _options;
 685   ShenandoahHeap *_heap;
 686   MarkBitMap* _bitmap;
 687   ShenandoahLivenessData* _ld;
 688   Atomic<size_t> _claimed;
 689   Atomic<size_t> _processed;
 690   ShenandoahGeneration* _generation;
 691 
 692 public:
  // Set up the parallel marked-objects verification task. Workers claim
  // regions via _claimed and sum verified-object counts into _processed.
  ShenandoahVerifierMarkedRegionTask(ShenandoahGeneration* generation,
                                     MarkBitMap* bitmap,
                                     ShenandoahLivenessData* ld,
                                     const char* label,
                                     ShenandoahVerifier::VerifyOptions options) :
          WorkerTask("Shenandoah Verifier Marked Objects"),
          _label(label),
          _options(options),
          _heap(ShenandoahHeap::heap()),
          _bitmap(bitmap),
          _ld(ld),
          _claimed(0),
          _processed(0),
          _generation(generation) {}
 707 
  // Number of objects verified; meaningful once all workers have finished.
  size_t processed() {
    return _processed.load_relaxed();
  }
 711 
  // Worker entry point: optionally checks SATB buffers are drained, then
  // claims regions one at a time and verifies the marked objects they hold.
  void work(uint worker_id) override {
    if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
      // Marking is claimed complete, so no thread may still hold buffered SATB entries.
      ShenandoahVerifyNoIncompleteSatbBuffers verify_satb;
      Threads::threads_do(&verify_satb);
    }

    ShenandoahVerifierStack stack;
    ShenandoahVerifyOopClosure cl(_generation, &stack, _bitmap, _ld,
                                  ShenandoahMessageBuffer("%s, Marked", _label),
                                  _options);

    while (true) {
      // Claim the next unprocessed region index; exit once all are claimed.
      size_t v = _claimed.fetch_then_add(1u, memory_order_relaxed);
      if (v < _heap->num_regions()) {
        ShenandoahHeapRegion* r = _heap->get_region(v);
        if (!in_generation(r)) {
          continue;
        }

        // Humongous continuations are verified via their start region;
        // trash regions hold no live objects to verify.
        if (!r->is_humongous() && !r->is_trash()) {
          work_regular(r, stack, cl);
        } else if (r->is_humongous_start()) {
          work_humongous(r, stack, cl);
        }
      } else {
        break;
      }
    }
  }
 741 
 742   bool in_generation(ShenandoahHeapRegion* r) const {
 743     return _generation->contains(r);
 744   }
 745 
 746   virtual void work_humongous(ShenandoahHeapRegion *r, ShenandoahVerifierStack& stack, ShenandoahVerifyOopClosure& cl) {
 747     size_t processed = 0;
 748     HeapWord* obj = r->bottom();
 749     if (_generation->complete_marking_context()->is_marked(cast_to_oop(obj))) {
 750       verify_and_follow(obj, stack, cl, &processed);
 751     }
 752     _processed.add_then_fetch(processed, memory_order_relaxed);
 753   }
 754 
 755   virtual void work_regular(ShenandoahHeapRegion *r, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl) {
 756     size_t processed = 0;
 757     ShenandoahMarkingContext* ctx = _generation->complete_marking_context();
 758     HeapWord* tams = ctx->top_at_mark_start(r);
 759 
 760     // Bitmaps, before TAMS
 761     if (tams > r->bottom()) {
 762       HeapWord* start = r->bottom();
 763       HeapWord* addr = ctx->get_next_marked_addr(start, tams);
 764 
 765       while (addr < tams) {
 766         verify_and_follow(addr, stack, cl, &processed);
 767         addr += 1;
 768         if (addr < tams) {
 769           addr = ctx->get_next_marked_addr(addr, tams);
 770         }
 771       }
 772     }
 773 
 774     // Size-based, after TAMS
 775     {
 776       HeapWord* limit = r->top();
 777       HeapWord* addr = tams;
 778 
 779       while (addr < limit) {
 780         verify_and_follow(addr, stack, cl, &processed);
 781         addr += ShenandoahForwarding::size(cast_to_oop(addr));
 782       }
 783     }
 784 
 785     _processed.add_then_fetch(processed, memory_order_relaxed);
 786   }
 787 
 788   void verify_and_follow(HeapWord *addr, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl, size_t *processed) {
 789     if (!_bitmap->par_mark(addr)) return;
 790 
 791     // Verify the object itself:
 792     oop obj = cast_to_oop(addr);
 793     cl.verify_oop_standalone(obj);
 794 
 795     // Verify everything reachable from that object too, hopefully realizing
 796     // everything was already marked, and never touching further:
 797     if (!is_instance_ref_klass(ShenandoahForwarding::klass(obj))) {
 798       cl.verify_oops_from(obj);
 799       (*processed)++;
 800     }
 801     while (!stack.is_empty()) {
 802       ShenandoahVerifierTask task = stack.pop();
 803       cl.verify_oops_from(task.obj());
 804       (*processed)++;
 805     }
 806   }
 807 };
 808 
 809 class VerifyThreadGCState : public ThreadClosure {
 810 private:
 811   const char* const _label;
 812   char const _expected;
 813 
 814 public:
 815   VerifyThreadGCState(const char* label, char expected) : _label(label), _expected(expected) {}
 816   void do_thread(Thread* t) override {
 817     char actual = ShenandoahThreadLocalData::gc_state(t);
 818     if (!verify_gc_state(actual, _expected)) {
 819       fatal("%s: Thread %s: expected gc-state %d, actual %d", _label, t->name(), _expected, actual);
 820     }
 821   }
 822 
 823   static bool verify_gc_state(char actual, char expected) {
 824     // Old generation marking is allowed in all states.
 825     if (ShenandoahHeap::heap()->mode()->is_generational()) {
 826       return ((actual & ~(ShenandoahHeap::OLD_MARKING | ShenandoahHeap::MARKING)) == expected);
 827     } else {
 828       assert((actual & ShenandoahHeap::OLD_MARKING) == 0, "Should not mark old in non-generational mode");
 829       return (actual == expected);
 830     }
 831   }
 832 };
 833 
// Central safepoint verification driver. Each Verify* parameter selects how
// strictly one heap property is checked; the verify_* entry points below pick
// suitable combinations for each GC phase. Must be called at a Shenandoah
// safepoint with ShenandoahVerify enabled (the verification bitmap is set up
// in ShenandoahHeap::initialize only in that case).
void ShenandoahVerifier::verify_at_safepoint(ShenandoahGeneration* generation,
                                             const char* label,
                                             VerifyRememberedSet remembered,
                                             VerifyForwarded forwarded,
                                             VerifyMarked marked,
                                             VerifyCollectionSet cset,
                                             VerifyLiveness liveness,
                                             VerifyRegions regions,
                                             VerifySize sizeness,
                                             VerifyGCState gcstate) {
  guarantee(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "only when nothing else happens");
  guarantee(ShenandoahVerify, "only when enabled, and bitmap is initialized in ShenandoahHeap::initialize");

  ShenandoahHeap::heap()->propagate_gc_state_to_all_threads();

  // Avoid side-effect of changing workers' active thread count, but bypass concurrent/parallel protocol check
  ShenandoahPushWorkerScope verify_worker_scope(_heap->workers(), _heap->max_workers(), false /*bypass check*/);

  log_info(gc,start)("Verify %s, Level %zd", label, ShenandoahVerifyLevel);

  // GC state checks
  {
    // Translate the requested verification mode into the expected gc-state bits.
    char expected = -1;
    bool enabled;
    switch (gcstate) {
      case _verify_gcstate_disable:
        enabled = false;
        break;
      case _verify_gcstate_forwarded:
        enabled = true;
        expected = ShenandoahHeap::HAS_FORWARDED;
        break;
      case _verify_gcstate_updating:
        enabled = true;
        expected = ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::UPDATE_REFS;
        break;
      case _verify_gcstate_stable:
        enabled = true;
        expected = ShenandoahHeap::STABLE;
        break;
      case _verify_gcstate_stable_weakroots:
        enabled = true;
        expected = ShenandoahHeap::STABLE;
        if (!_heap->is_stw_gc_in_progress()) {
          // Only concurrent GC sets this.
          expected |= ShenandoahHeap::WEAK_ROOTS;
        }
        break;
      default:
        enabled = false;
        assert(false, "Unhandled gc-state verification");
    }

    if (enabled) {
      // Validate the global gc-state first, then every Java thread's cached copy.
      char actual = _heap->gc_state();

      // MARKING is the union bit: it must be set iff a young or old marking bit is set.
      bool is_marking = (actual & ShenandoahHeap::MARKING);
      bool is_marking_young_or_old = (actual & (ShenandoahHeap::YOUNG_MARKING | ShenandoahHeap::OLD_MARKING));
      assert(is_marking == is_marking_young_or_old, "MARKING iff (YOUNG_MARKING or OLD_MARKING), gc_state is: %x", actual);

      // Old generation marking is allowed in all states.
      if (!VerifyThreadGCState::verify_gc_state(actual, expected)) {
        fatal("%s: Global gc-state: expected %d, actual %d", label, expected, actual);
      }

      VerifyThreadGCState vtgcs(label, expected);
      Threads::java_threads_do(&vtgcs);
    }
  }

  // Deactivate barriers temporarily: Verifier wants plain heap accesses
  ShenandoahGCStateResetter resetter;

  // Heap size checks
  {
    ShenandoahHeapLocker lock(_heap->lock());

    ShenandoahCalculateRegionStatsClosure cl;
    _heap->heap_region_iterate(&cl);
    size_t heap_used;
    // NOTE(review): both non-disabled branches below assign heap_used the same
    // way (_heap->used()); the split only preserves the padding explanation.
    // heap_used stays uninitialized when sizeness == _verify_size_disable, but
    // is then never read (guarded below) — confirm this matches intent.
    if (_heap->mode()->is_generational() && (sizeness == _verify_size_adjusted_for_padding)) {
      // Prior to evacuation, regular regions that are to be evacuated in place are padded to prevent further allocations
      // but this padding is already represented in _heap->used()
      heap_used = _heap->used();
    } else if (sizeness != _verify_size_disable) {
      heap_used = _heap->used();
    }
    if (sizeness != _verify_size_disable) {
      // Including trash counts not-yet-recycled regions as used.
      size_t cl_size = (sizeness == _verify_size_exact_including_trash)? cl.used(): cl.used_after_recycle();
      guarantee(cl_size == heap_used,
                "%s: heap used size must be consistent: heap-used = %zu%s, regions-used = %zu%s",
                label,
                byte_size_in_proper_unit(heap_used), proper_unit_for_byte_size(heap_used),
                byte_size_in_proper_unit(cl_size), proper_unit_for_byte_size(cl_size));
    }
    // Committed memory must always be consistent, regardless of sizeness mode.
    size_t heap_committed = _heap->committed();
    guarantee(cl.committed() == heap_committed,
              "%s: heap committed size must be consistent: heap-committed = %zu%s, regions-committed = %zu%s",
              label,
              byte_size_in_proper_unit(heap_committed), proper_unit_for_byte_size(heap_committed),
              byte_size_in_proper_unit(cl.committed()), proper_unit_for_byte_size(cl.committed()));
  }

  log_debug(gc)("Safepoint verification finished heap usage verification");

  // Generational-only checks: remembered set and per-generation usage accounting.
  if (_heap->mode()->is_generational()) {
    ShenandoahHeapLocker lock(_heap->lock());

    switch (remembered) {
      case _verify_remembered_disable:
        break;
      case _verify_remembered_before_marking:
        log_debug(gc)("Safepoint verification of remembered set at mark");
        verify_rem_set_before_mark();
        break;
      case _verify_remembered_before_updating_references:
        log_debug(gc)("Safepoint verification of remembered set at update ref");
        verify_rem_set_before_update_ref();
        break;
      case _verify_remembered_after_full_gc:
        log_debug(gc)("Safepoint verification of remembered set after full gc");
        verify_rem_set_after_full_gc();
        break;
      default:
        fatal("Unhandled remembered set verification mode");
    }

    ShenandoahGenerationStatsClosure cl;
    _heap->heap_region_iterate(&cl);

    if (LogTarget(Debug, gc)::is_enabled()) {
      ShenandoahGenerationStatsClosure::log_usage(_heap->old_generation(),    cl._old);
      ShenandoahGenerationStatsClosure::log_usage(_heap->young_generation(),  cl._young);
      ShenandoahGenerationStatsClosure::log_usage(_heap->global_generation(), cl._global);
    }
    if (sizeness == _verify_size_adjusted_for_padding) {
      ShenandoahGenerationStatsClosure::validate_usage(false, true, label, _heap->old_generation(), cl._old);
      ShenandoahGenerationStatsClosure::validate_usage(true, true, label, _heap->young_generation(), cl._young);
      ShenandoahGenerationStatsClosure::validate_usage(true, true, label, _heap->global_generation(), cl._global);
    } else if (sizeness == _verify_size_exact || sizeness == _verify_size_exact_including_trash) {
      bool adjust_trash = (sizeness == _verify_size_exact);
      ShenandoahGenerationStatsClosure::validate_usage(false, adjust_trash, label, _heap->old_generation(), cl._old);
      ShenandoahGenerationStatsClosure::validate_usage(false, adjust_trash, label, _heap->young_generation(), cl._young);
      ShenandoahGenerationStatsClosure::validate_usage(false, adjust_trash, label, _heap->global_generation(), cl._global);
    }
    // else: sizeness must equal _verify_size_disable
  }

  log_debug(gc)("Safepoint verification finished remembered set verification");

  // Internal heap region checks
  if (ShenandoahVerifyLevel >= 1) {
    ShenandoahVerifyHeapRegionClosure cl(label, regions);
    generation->heap_region_iterate(&cl);
  }

  log_debug(gc)("Safepoint verification finished heap region closure verification");

  OrderAccess::fence();

  // Retire TLABs so the linear object walks below see a parsable heap.
  if (UseTLAB) {
    _heap->labs_make_parsable();
  }

  // Allocate temporary bitmap for storing marking wavefront:
  _verification_bit_map->clear();

  // Allocate temporary array for storing liveness data
  ShenandoahLivenessData* ld = NEW_C_HEAP_ARRAY(ShenandoahLivenessData, _heap->num_regions(), mtGC);
  Copy::fill_to_bytes((void*)ld, _heap->num_regions()*sizeof(ShenandoahLivenessData), 0);

  const VerifyOptions& options = ShenandoahVerifier::VerifyOptions(forwarded, marked, cset, liveness, regions, gcstate);

  // Steps 1-2. Scan root set to get initial reachable set. Finish walking the reachable heap.
  // This verifies what application can see, since it only cares about reachable objects.
  size_t count_reachable = 0;
  if (ShenandoahVerifyLevel >= 2) {
    ShenandoahVerifierReachableTask task(generation, _verification_bit_map, ld, label, options);
    _heap->workers()->run_task(&task);
    count_reachable = task.processed();
  }

  log_debug(gc)("Safepoint verification finished getting initial reachable set");

  // Step 3. Walk marked objects. Marked objects might be unreachable. This verifies what collector,
  // not the application, can see during the region scans. There is no reason to process the objects
  // that were already verified, e.g. those marked in verification bitmap. There is interaction with TAMS:
  // before TAMS, we verify the bitmaps, if available; after TAMS, we walk until the top(). It mimics
  // what marked_object_iterate is doing, without calling into that optimized (and possibly incorrect)
  // version

  size_t count_marked = 0;
  if (ShenandoahVerifyLevel >= 4 &&
        (marked == _verify_marked_complete ||
         marked == _verify_marked_complete_except_references ||
         marked == _verify_marked_complete_satb_empty)) {
    guarantee(generation->is_mark_complete(), "Marking context should be complete");
    ShenandoahVerifierMarkedRegionTask task(generation, _verification_bit_map, ld, label, options);
    _heap->workers()->run_task(&task);
    count_marked = task.processed();
  } else {
    guarantee(ShenandoahVerifyLevel < 4 || marked == _verify_marked_incomplete || marked == _verify_marked_disable, "Should be");
  }

  log_debug(gc)("Safepoint verification finished walking marked objects");

  // Step 4. Verify accumulated liveness data, if needed. Only reliable if verification level includes
  // marked objects.

  if (ShenandoahVerifyLevel >= 4 && marked == _verify_marked_complete && liveness == _verify_liveness_complete) {
    for (size_t i = 0; i < _heap->num_regions(); i++) {
      ShenandoahHeapRegion* r = _heap->get_region(i);
      if (!generation->contains(r)) {
        continue;
      }

      juint verf_live = 0;
      if (r->is_humongous()) {
        // For humongous objects, test if start region is marked live, and if so,
        // all humongous regions in that chain have live data equal to their "used".
        juint start_live = ld[r->humongous_start_region()->index()].load_relaxed();
        if (start_live > 0) {
          verf_live = (juint)(r->used() / HeapWordSize);
        }
      } else {
        verf_live = ld[r->index()].load_relaxed();
      }

      size_t reg_live = r->get_live_data_words();
      if (reg_live != verf_live) {
        stringStream ss;
        r->print_on(&ss);
        fatal("%s: Live data should match: region-live = %zu, verifier-live = " UINT32_FORMAT "\n%s",
              label, reg_live, verf_live, ss.freeze());
      }
    }
  }

  log_debug(gc)("Safepoint verification finished accumulation of liveness data");
  log_info(gc)("Verify %s, Level %zd (%zu reachable, %zu marked)",
               label, ShenandoahVerifyLevel, count_reachable, count_marked);

  FREE_C_HEAP_ARRAY(ShenandoahLivenessData, ld);
}
1078 
// Weakest verification mode, usable at arbitrary points where little is known
// about heap state: almost every check is disabled.
void ShenandoahVerifier::verify_generic(ShenandoahGeneration* generation, VerifyOption vo) {
  verify_at_safepoint(
          generation,
          "Generic Verification",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // conservatively allow forwarded
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_disable,        // cset may be inconsistent
          _verify_liveness_disable,    // no reliable liveness data
          _verify_regions_disable,     // no reliable region data
          _verify_size_disable,        // no reliable sizing data
          _verify_gcstate_disable      // no data about gcstate
  );
}
1093 
1094 void ShenandoahVerifier::verify_before_concmark(ShenandoahGeneration* generation) {
1095   VerifyRememberedSet verify_remembered_set = _verify_remembered_before_marking;
1096   if (_heap->mode()->is_generational() &&
1097       !_heap->old_generation()->is_mark_complete()) {
1098     // Before marking in generational mode, remembered set can't be verified w/o complete old marking.
1099     verify_remembered_set = _verify_remembered_disable;
1100   }
1101   verify_at_safepoint(
1102           generation,
1103           "Before Mark",
1104           verify_remembered_set,
1105                                        // verify read-only remembered set from bottom() to top()
1106           _verify_forwarded_none,      // UR should have fixed up
1107           _verify_marked_disable,      // do not verify marked: lots ot time wasted checking dead allocations
1108           _verify_cset_none,           // UR should have fixed this
1109           _verify_liveness_disable,    // no reliable liveness data
1110           _verify_regions_notrash,     // no trash regions
1111           _verify_size_exact,          // expect generation and heap sizes to match exactly
1112           _verify_gcstate_stable       // there are no forwarded objects
1113   );
1114 }
1115 
// Verification after concurrent marking completes: mark bitmaps and liveness
// data are expected to be complete, SATB buffers drained, heap still stable.
void ShenandoahVerifier::verify_after_concmark(ShenandoahGeneration* generation) {
  verify_at_safepoint(
          generation,
          "After Mark",
          _verify_remembered_disable,         // do not verify remembered set
          _verify_forwarded_none,             // no forwarded references
          _verify_marked_complete_satb_empty, // bitmaps as precise as we can get, except dangling j.l.r.Refs
          _verify_cset_none,                  // no references to cset anymore
          _verify_liveness_complete,          // liveness data must be complete here
          _verify_regions_disable,            // trash regions not yet recycled
          _verify_size_exact,                 // expect generation and heap sizes to match exactly
          _verify_gcstate_stable_weakroots    // heap is still stable, weakroots are in progress
  );
}
1130 
// Same as verify_after_concmark, except size verification accounts for
// promote-in-place padding of regions selected for in-place promotion.
// NOTE(review): label "After Mark" duplicates verify_after_concmark's label —
// confirm this is intentional (failure messages cannot distinguish the two).
void ShenandoahVerifier::verify_after_concmark_with_promotions(ShenandoahGeneration* generation) {
  verify_at_safepoint(
          generation,
          "After Mark",
          _verify_remembered_disable,         // do not verify remembered set
          _verify_forwarded_none,             // no forwarded references
          _verify_marked_complete_satb_empty, // bitmaps as precise as we can get, except dangling j.l.r.Refs
          _verify_cset_none,                  // no references to cset anymore
          _verify_liveness_complete,          // liveness data must be complete here
          _verify_regions_disable,            // trash regions not yet recycled
          _verify_size_adjusted_for_padding,  // expect generation and heap sizes to match after adjustments
                                              // for promote in place padding
          _verify_gcstate_stable_weakroots    // heap is still stable, weakroots are in progress
  );
}
1146 
// Verification just before evacuation starts: marking is complete, but the
// collection set has been chosen, so un-forwarded references into it are
// expected and not checked.
void ShenandoahVerifier::verify_before_evacuation(ShenandoahGeneration* generation) {
  verify_at_safepoint(
          generation,
          "Before Evacuation",
          _verify_remembered_disable,                // do not verify remembered set
          _verify_forwarded_none,                    // no forwarded references
          _verify_marked_complete_except_references, // walk over marked objects too
          _verify_cset_disable,                      // non-forwarded references to cset expected
          _verify_liveness_complete,                 // liveness data must be complete here
          _verify_regions_disable,                   // trash regions not yet recycled
          _verify_size_adjusted_for_padding,         // expect generation and heap sizes to match after adjustments
                                                     //  for promote in place padding
          _verify_gcstate_stable_weakroots           // heap is still stable, weakroots are in progress
  );
}
1162 
1163 void ShenandoahVerifier::verify_before_update_refs(ShenandoahGeneration* generation) {
1164   VerifyRememberedSet verify_remembered_set = _verify_remembered_before_updating_references;
1165   if (_heap->mode()->is_generational() &&
1166       !_heap->old_generation()->is_mark_complete()) {
1167     verify_remembered_set = _verify_remembered_disable;
1168   }
1169   verify_at_safepoint(
1170           generation,
1171           "Before Updating References",
1172           verify_remembered_set,        // verify read-write remembered set
1173           _verify_forwarded_allow,     // forwarded references allowed
1174           _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
1175           _verify_cset_forwarded,      // all cset refs are fully forwarded
1176           _verify_liveness_disable,    // no reliable liveness data anymore
1177           _verify_regions_notrash,     // trash regions have been recycled already
1178           _verify_size_exact,          // expect generation and heap sizes to match exactly
1179           _verify_gcstate_updating     // evacuation should have produced some forwarded objects
1180   );
1181 }
1182 
// Verification after the update-references phase. We have not yet cleaned up
// (reclaimed) the collection set, so trash regions are expected, but no
// references into the cset may remain.
void ShenandoahVerifier::verify_after_update_refs(ShenandoahGeneration* generation) {
  verify_at_safepoint(
          generation,
          "After Updating References",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_none,      // no forwarded references
          _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
          _verify_cset_none,           // no cset references, all updated
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_nocset,      // no cset regions, trash regions have appeared
                                       // expect generation and heap sizes to match exactly, including trash
          _verify_size_exact_including_trash,
          _verify_gcstate_stable       // update refs had cleaned up forwarded objects
  );
}
1199 
// Verification after a degenerated GC cycle finishes: the heap should be
// fully cleaned up — no forwarding, no cset, no trash.
void ShenandoahVerifier::verify_after_degenerated(ShenandoahGeneration* generation) {
  verify_at_safepoint(
          generation,
          "After Degenerated GC",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_none,      // all objects are non-forwarded
          _verify_marked_complete,     // all objects are marked in complete bitmap
          _verify_cset_none,           // no cset references
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash_nocset, // no trash, no cset
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // degenerated refs had cleaned up forwarded objects
  );
}
1214 
// Verification before a full GC. Full GC may start from an arbitrary broken
// state (e.g. after a failed concurrent cycle), so nearly everything is
// conservatively disabled.
void ShenandoahVerifier::verify_before_fullgc(ShenandoahGeneration* generation) {
  verify_at_safepoint(
          generation,
          "Before Full GC",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // can have forwarded objects
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_disable,        // cset might be foobared
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_disable,     // no reliable region data here
          _verify_size_disable,        // if we degenerate during evacuation, usage not valid: padding and deferred accounting
          _verify_gcstate_disable      // no reliable gcstate data
  );
}
1229 
// Verification after a full GC completes: heap is fully compacted and stable;
// the remembered set is rebuilt, so it can be checked in its entirety.
void ShenandoahVerifier::verify_after_fullgc(ShenandoahGeneration* generation) {
  verify_at_safepoint(
          generation,
          "After Full GC",
          _verify_remembered_after_full_gc,  // verify read-write remembered set
          _verify_forwarded_none,      // all objects are non-forwarded
          _verify_marked_incomplete,   // all objects are marked in incomplete bitmap
          _verify_cset_none,           // no cset references
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash_nocset, // no trash, no cset
          _verify_size_exact,           // expect generation and heap sizes to match exactly
          _verify_gcstate_stable        // full gc cleaned up everything
  );
}
1244 
1245 class ShenandoahVerifyNoForwarded : public BasicOopIterateClosure {
1246 private:
1247   template <class T>
1248   void do_oop_work(T* p) {
1249     T o = RawAccess<>::oop_load(p);
1250     if (!CompressedOops::is_null(o)) {
1251       oop obj = CompressedOops::decode_raw_not_null(o);
1252       ShenandoahAsserts::assert_correct(p, obj, __FILE__, __LINE__);
1253 
1254       oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
1255       if (obj != fwd) {
1256         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1257                                          "Verify Roots", "Should not be forwarded", __FILE__, __LINE__);
1258       }
1259     }
1260   }
1261 
1262 public:
1263   void do_oop(narrowOop* p) { do_oop_work(p); }
1264   void do_oop(oop* p)       { do_oop_work(p); }
1265 };
1266 
// Root verification closure: every root-reachable object must already be in
// to-space — marked (or old), outside the collection set, and not forwarded.
class ShenandoahVerifyInToSpaceClosure : public BasicOopIterateClosure {
private:
  template <class T>
  void do_oop_work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_raw_not_null(o);
      // Basic structural checks on the reference and its target.
      ShenandoahAsserts::assert_correct(p, obj, __FILE__, __LINE__);

      ShenandoahHeap* heap = ShenandoahHeap::heap();

      // Roots must point at marked objects (old-generation objects pass too).
      if (!heap->marking_context()->is_marked_or_old(obj)) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                "Verify Roots In To-Space", "Should be marked", __FILE__, __LINE__);
      }

      // Roots must not point into the collection set.
      if (heap->in_collection_set(obj)) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                "Verify Roots In To-Space", "Should not be in collection set", __FILE__, __LINE__);
      }

      // Roots must not point at forwarded objects.
      oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
      if (obj != fwd) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                "Verify Roots In To-Space", "Should not be forwarded", __FILE__, __LINE__);
      }
    }
  }

public:
  void do_oop(narrowOop* p) override { do_oop_work(p); }
  void do_oop(oop* p)       override { do_oop_work(p); }
};
1300 
// Verify all roots of the given generation point to to-space objects
// (marked, outside the cset, not forwarded).
void ShenandoahVerifier::verify_roots_in_to_space(ShenandoahGeneration* generation) {
  ShenandoahVerifyInToSpaceClosure cl;
  ShenandoahRootVerifier::roots_do(&cl, generation);
}
1305 
// Verify no root of the given generation points to a forwarded object.
void ShenandoahVerifier::verify_roots_no_forwarded(ShenandoahGeneration* generation) {
  ShenandoahVerifyNoForwarded cl;
  ShenandoahRootVerifier::roots_do(&cl, generation);
}
1310 
// Remembered-set verification closure: any reference to a young-generation
// object must reside on a dirty card in the given scanner's card table view.
template<typename Scanner>
class ShenandoahVerifyRemSetClosure : public BasicOopIterateClosure {
protected:
  ShenandoahGenerationalHeap* const _heap;
  Scanner*   const _scanner;    // card-table view to check dirtiness against
  const char* _message;         // failure-message prefix identifying the phase

public:
  // The message distinguishes initial-mark from start-of-update-refs verification.
  explicit ShenandoahVerifyRemSetClosure(Scanner* scanner, const char* message) :
            _heap(ShenandoahGenerationalHeap::heap()),
            _scanner(scanner),
            _message(message) {}

  template<class T>
  inline void work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_raw_not_null(o);
      ShenandoahAsserts::assert_correct(p, obj, __FILE__, __LINE__);

      // A pointer to a young object must be covered by a dirty card, else
      // remembered-set scanning would miss it.
      if (_heap->is_in_young(obj) && !_scanner->is_card_dirty((HeapWord*) p)) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                                         _message, "clean card, it should be dirty.", __FILE__, __LINE__);
      }
    }
  }

  void do_oop(narrowOop* p) override { work(p); }
  void do_oop(oop* p)       override { work(p); }
};
1342 
// Verify the remembered set for a single region: all interesting (young-
// pointing) references inside live objects must be on dirty cards, and
// objects below registration_watermark must be registered with the scanner.
// Humongous continuation regions are skipped — they are covered via their
// start region.
template<typename Scanner>
void ShenandoahVerifier::help_verify_region_rem_set(Scanner* scanner, ShenandoahHeapRegion* r,
                                                    HeapWord* registration_watermark, const char* message) {
  ShenandoahOldGeneration* old_gen = _heap->old_generation();
  assert(old_gen->is_mark_complete() || old_gen->is_parsable(), "Sanity");

  // With a complete old mark, use the marking context to skip dead objects;
  // otherwise (parsable heap) ctx stays null and every object is walked.
  ShenandoahMarkingContext* ctx = old_gen->is_mark_complete() ? old_gen->complete_marking_context() : nullptr;
  ShenandoahVerifyRemSetClosure<Scanner> check_interesting_pointers(scanner, message);
  HeapWord* from = r->bottom();
  HeapWord* obj_addr = from;
  if (r->is_humongous_start()) {
    oop obj = cast_to_oop(obj_addr);
    if ((ctx == nullptr) || ctx->is_marked(obj)) {
      // For humongous objects, the typical object is an array, so the following checks may be overkill
      // For regular objects (not object arrays), if the card holding the start of the object is dirty,
      // we do not need to verify that cards spanning interesting pointers within this object are dirty.
      if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
        obj->oop_iterate(&check_interesting_pointers);
      }
      // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
    }
    // else, this humongous object is not live so no need to verify its internal pointers

    if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
      ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
                                       "object not properly registered", __FILE__, __LINE__);
    }
  } else if (!r->is_humongous()) {
    // Regular region: walk objects linearly from bottom() to top(), skipping
    // dead runs via the mark bitmap when a marking context is available.
    HeapWord* top = r->top();
    while (obj_addr < top) {
      oop obj = cast_to_oop(obj_addr);
      // ctx->is_marked() returns true if mark bit set or if obj above TAMS.
      if ((ctx == nullptr) || ctx->is_marked(obj)) {
        // For regular objects (not object arrays), if the card holding the start of the object is dirty,
        // we do not need to verify that cards spanning interesting pointers within this object are dirty.
        if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
          obj->oop_iterate(&check_interesting_pointers);
        }
        // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered

        if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
          ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
                                           "object not properly registered", __FILE__, __LINE__);
        }
        obj_addr += obj->size();
      } else {
        // This object is not live so we don't verify dirty cards contained therein
        HeapWord* tams = ctx->top_at_mark_start(r);
        obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
      }
    }
  }
}
1396 
1397 class ShenandoahWriteTableScanner {
1398 private:
1399   ShenandoahScanRemembered* _scanner;
1400 public:
1401   explicit ShenandoahWriteTableScanner(ShenandoahScanRemembered* scanner) : _scanner(scanner) {}
1402 
1403   bool is_card_dirty(HeapWord* obj_addr) {
1404     return _scanner->is_write_card_dirty(obj_addr);
1405   }
1406 
1407   bool verify_registration(HeapWord* obj_addr, ShenandoahMarkingContext* ctx) {
1408     return _scanner->verify_registration(obj_addr, ctx);
1409   }
1410 };
1411 
1412 // Assure that the remember set has a dirty card everywhere there is an interesting pointer.
1413 // This examines the read_card_table between bottom() and top() since all PLABS are retired
1414 // before the safepoint for init_mark.  Actually, we retire them before update-references and don't
1415 // restore them until the start of evacuation.
1416 void ShenandoahVerifier::verify_rem_set_before_mark() {
1417   shenandoah_assert_safepoint();
1418   shenandoah_assert_generational();
1419 
1420   ShenandoahOldGeneration* old_generation = _heap->old_generation();
1421 
1422   log_debug(gc)("Verifying remembered set at %s mark", old_generation->is_doing_mixed_evacuations() ? "mixed" : "young");
1423 
1424   ShenandoahScanRemembered* scanner = old_generation->card_scan();
1425   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1426     ShenandoahHeapRegion* r = _heap->get_region(i);
1427     if (r->is_old() && r->is_active()) {
1428       help_verify_region_rem_set(scanner, r, r->end(), "Verify init-mark remembered set violation");
1429     }
1430   }
1431 }
1432 
1433 void ShenandoahVerifier::verify_rem_set_after_full_gc() {
1434   shenandoah_assert_safepoint();
1435   shenandoah_assert_generational();
1436 
1437   ShenandoahWriteTableScanner scanner(ShenandoahGenerationalHeap::heap()->old_generation()->card_scan());
1438   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1439     ShenandoahHeapRegion* r = _heap->get_region(i);
1440     if (r->is_old() && !r->is_cset()) {
1441       help_verify_region_rem_set(&scanner, r, r->top(), "Remembered set violation at end of Full GC");
1442     }
1443   }
1444 }
1445 
1446 // Assure that the remember set has a dirty card everywhere there is an interesting pointer.  Even though
1447 // the update-references scan of remembered set only examines cards up to update_watermark, the remembered
1448 // set should be valid through top.  This examines the write_card_table between bottom() and top() because
1449 // all PLABS are retired immediately before the start of update refs.
1450 void ShenandoahVerifier::verify_rem_set_before_update_ref() {
1451   shenandoah_assert_safepoint();
1452   shenandoah_assert_generational();
1453 
1454   ShenandoahWriteTableScanner scanner(_heap->old_generation()->card_scan());
1455   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1456     ShenandoahHeapRegion* r = _heap->get_region(i);
1457     if (r->is_old() && !r->is_cset()) {
1458       help_verify_region_rem_set(&scanner, r, r->get_update_watermark(), "Remembered set violation at init-update-references");
1459     }
1460   }
1461 }
1462 
1463 void ShenandoahVerifier::verify_before_rebuilding_free_set() {
1464   ShenandoahGenerationStatsClosure cl;
1465   _heap->heap_region_iterate(&cl);
1466 
1467   ShenandoahGenerationStatsClosure::validate_usage(false, true, "Before free set rebuild", _heap->old_generation(), cl._old);
1468   ShenandoahGenerationStatsClosure::validate_usage(false, true, "Before free set rebuild", _heap->young_generation(), cl._young);
1469   ShenandoahGenerationStatsClosure::validate_usage(false, true, "Before free set rebuild", _heap->global_generation(), cl._global);
1470 }