1 /*
   2  * Copyright (c) 2017, 2025, Red Hat, Inc. All rights reserved.
   3  * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
   4  * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
   5  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   6  *
   7  * This code is free software; you can redistribute it and/or modify it
   8  * under the terms of the GNU General Public License version 2 only, as
   9  * published by the Free Software Foundation.
  10  *
  11  * This code is distributed in the hope that it will be useful, but WITHOUT
  12  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  13  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  14  * version 2 for more details (a copy is included in the LICENSE file that
  15  * accompanied this code).
  16  *
  17  * You should have received a copy of the GNU General Public License version
  18  * 2 along with this work; if not, write to the Free Software Foundation,
  19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  20  *
  21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  22  * or visit www.oracle.com if you need additional information or have any
  23  * questions.
  24  *
  25  */
  26 
  27 #include "gc/shared/tlab_globals.hpp"
  28 #include "gc/shenandoah/shenandoahAsserts.hpp"
  29 #include "gc/shenandoah/shenandoahForwarding.inline.hpp"
  30 #include "gc/shenandoah/shenandoahGeneration.hpp"
  31 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
  32 #include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
  33 #include "gc/shenandoah/shenandoahOldGeneration.hpp"
  34 #include "gc/shenandoah/shenandoahPhaseTimings.hpp"
  35 #include "gc/shenandoah/shenandoahRootProcessor.hpp"
  36 #include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
  37 #include "gc/shenandoah/shenandoahTaskqueue.inline.hpp"
  38 #include "gc/shenandoah/shenandoahUtils.hpp"
  39 #include "gc/shenandoah/shenandoahVerifier.hpp"
  40 #include "gc/shenandoah/shenandoahYoungGeneration.hpp"
  41 #include "memory/allocation.hpp"
  42 #include "memory/iterator.inline.hpp"
  43 #include "memory/resourceArea.hpp"
  44 #include "oops/compressedOops.inline.hpp"
  45 #include "runtime/atomic.hpp"
  46 #include "runtime/orderAccess.hpp"
  47 #include "runtime/threads.hpp"
  48 #include "utilities/align.hpp"
  49 
  50 // Avoid name collision on verify_oop (defined in macroAssembler_arm.hpp)
  51 #ifdef verify_oop
  52 #undef verify_oop
  53 #endif
  54 
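// Returns true for java.lang.ref.Reference subclasses. The verifier treats their
// referent fields specially, because a referent may legitimately point at an
// object that is not marked.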
  55 static bool is_instance_ref_klass(Klass* k) {
  56   return k->is_instance_klass() && InstanceKlass::cast(k)->reference_type() != REF_NONE;
  57 }
  58 
  59 class ShenandoahVerifyOopClosure : public BasicOopIterateClosure {
  60 private:
  61   const char* _phase;
  62   ShenandoahVerifier::VerifyOptions _options;
  63   ShenandoahVerifierStack* _stack;
  64   ShenandoahHeap* _heap;
  65   MarkBitMap* _map;
  66   ShenandoahLivenessData* _ld;
  67   void* _interior_loc;
  68   oop _loc;
  69   ReferenceIterationMode _ref_mode;
  70   ShenandoahGeneration* _generation;
  71 
  72 public:
  73   ShenandoahVerifyOopClosure(ShenandoahVerifierStack* stack, MarkBitMap* map, ShenandoahLivenessData* ld,
  74                              const char* phase, ShenandoahVerifier::VerifyOptions options) :
  75     _phase(phase),
  76     _options(options),
  77     _stack(stack),
  78     _heap(ShenandoahHeap::heap()),
  79     _map(map),
  80     _ld(ld),
  81     _interior_loc(nullptr),
  82     _loc(nullptr),
  83     _generation(nullptr) {
  84     if (options._verify_marked == ShenandoahVerifier::_verify_marked_complete_except_references ||
  85         options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty ||
  86         options._verify_marked == ShenandoahVerifier::_verify_marked_disable) {
      // Unknown status for Reference.referent field. Do not touch it, it might be dead.
      // Normally, barriers would prevent us from seeing dead referents, but the verifier
      // runs with barriers disabled.
  90       _ref_mode = DO_FIELDS_EXCEPT_REFERENT;
  91     } else {
  92       // Otherwise do all fields.
  93       _ref_mode = DO_FIELDS;
  94     }
  95 
  96     if (_heap->mode()->is_generational()) {
  97       _generation = _heap->gc_generation();
  98       assert(_generation != nullptr, "Expected active generation in this mode");
  99       shenandoah_assert_generations_reconciled();
 100     }
 101   }
 102 
 103   ReferenceIterationMode reference_iteration_mode() override {
 104     return _ref_mode;
 105   }
 106 
 107 private:
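  // Evaluate a single verification condition; on failure, produce a detailed failure
  // report that includes the interior location and the host object being scanned.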
 108   void check(ShenandoahAsserts::SafeLevel level, oop obj, bool test, const char* label) {
 109     if (!test) {
 110       ShenandoahAsserts::print_failure(level, obj, _interior_loc, _loc, _phase, label, __FILE__, __LINE__);
 111     }
 112   }
 113 
 114   template <class T>
 115   void do_oop_work(T* p) {
 116     T o = RawAccess<>::oop_load(p);
 117     if (!CompressedOops::is_null(o)) {
 118       oop obj = CompressedOops::decode_not_null(o);
 119       if (is_instance_ref_klass(ShenandoahForwarding::klass(obj))) {
 120         obj = ShenandoahForwarding::get_forwardee(obj);
 121       }
 122       // Single threaded verification can use faster non-atomic stack and bitmap
 123       // methods.
 124       //
 125       // For performance reasons, only fully verify non-marked field values.
 126       // We are here when the host object for *p is already marked.
 127       if (in_generation(obj) && _map->par_mark(obj)) {
 128         verify_oop_at(p, obj);
 129         _stack->push(ShenandoahVerifierTask(obj));
 130       }
 131     }
 132   }
 133 
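  // In generational mode, restrict verification to regions owned by the active
  // generation; in non-generational mode every object qualifies.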
 134   bool in_generation(oop obj) {
 135     if (_generation == nullptr) {
 136       return true;
 137     }
 138 
 139     ShenandoahHeapRegion* region = _heap->heap_region_containing(obj);
 140     return _generation->contains(region);
 141   }
 142 
 143   void verify_oop(oop obj) {
    // Perform consistency checks with gradually decreasing safety level. This guarantees
    // that a failure report never touches anything that has not yet been verified to be
    // safe to process.
 147 
 148     check(ShenandoahAsserts::_safe_unknown, obj, _heap->is_in_reserved(obj),
 149               "oop must be in heap bounds");
 150     check(ShenandoahAsserts::_safe_unknown, obj, is_object_aligned(obj),
 151               "oop must be aligned");
 152     check(ShenandoahAsserts::_safe_unknown, obj, os::is_readable_pointer(obj),
 153               "oop must be accessible");
 154 
 155     ShenandoahHeapRegion *obj_reg = _heap->heap_region_containing(obj);
 156 
 157     narrowKlass nk = 0;
 158     const Klass* obj_klass = nullptr;
 159     const bool klass_valid = ShenandoahAsserts::extract_klass_safely(obj, nk, obj_klass);
 160 
 161     check(ShenandoahAsserts::_safe_unknown, obj, klass_valid,
 162            "Object klass pointer unreadable or invalid");
 163 
 164     // Verify that obj is not in dead space:
 165     {
 166       // Do this before touching obj->size()
 167       check(ShenandoahAsserts::_safe_unknown, obj, Metaspace::contains(obj_klass),
 168              "Object klass pointer must go to metaspace");
 169 
 170       HeapWord *obj_addr = cast_from_oop<HeapWord*>(obj);
 171       check(ShenandoahAsserts::_safe_unknown, obj, obj_addr < obj_reg->top(),
 172              "Object start should be within the region");
 173 
 174       if (!obj_reg->is_humongous()) {
 175         check(ShenandoahAsserts::_safe_unknown, obj, (obj_addr + ShenandoahForwarding::size(obj)) <= obj_reg->top(),
 176                "Object end should be within the region");
 177       } else {
 178         size_t humongous_start = obj_reg->index();
 179         size_t humongous_end = humongous_start + (ShenandoahForwarding::size(obj) >> ShenandoahHeapRegion::region_size_words_shift());
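        // Every additional region covered by this humongous object must be a
        // continuation of the start region.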
 180         for (size_t idx = humongous_start + 1; idx < humongous_end; idx++) {
 181           check(ShenandoahAsserts::_safe_unknown, obj, _heap->get_region(idx)->is_humongous_continuation(),
 182                  "Humongous object is in continuation that fits it");
 183         }
 184       }
 185 
 186       // ------------ obj is safe at this point --------------
 187 
 188       check(ShenandoahAsserts::_safe_oop, obj, obj_reg->is_active(),
 189             "Object should be in active region");
 190 
 191       switch (_options._verify_liveness) {
 192         case ShenandoahVerifier::_verify_liveness_disable:
 193           // skip
 194           break;
 195         case ShenandoahVerifier::_verify_liveness_complete:
 196           Atomic::add(&_ld[obj_reg->index()], (uint) ShenandoahForwarding::size(obj), memory_order_relaxed);
          // fall through for fast failure on regions without live data:
 198         case ShenandoahVerifier::_verify_liveness_conservative:
 199           check(ShenandoahAsserts::_safe_oop, obj, obj_reg->has_live() ||
 200                 (obj_reg->is_old() && _heap->gc_generation()->is_young()),
 201                    "Object must belong to region with live data");
 202           shenandoah_assert_generations_reconciled();
 203           break;
 204         default:
 205           assert(false, "Unhandled liveness verification");
 206       }
 207     }
 208 
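    // Resolve the forwardee without safety checks; the checks below establish its
    // validity step by step before it is used further.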
 209     oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
 210 
 211     ShenandoahHeapRegion* fwd_reg = nullptr;
 212 
 213     if (obj != fwd) {
 214       check(ShenandoahAsserts::_safe_oop, obj, _heap->is_in_reserved(fwd),
 215              "Forwardee must be in heap bounds");
 216       check(ShenandoahAsserts::_safe_oop, obj, !CompressedOops::is_null(fwd),
 217              "Forwardee is set");
 218       check(ShenandoahAsserts::_safe_oop, obj, is_object_aligned(fwd),
 219              "Forwardee must be aligned");
 220 
 221       // Do this before touching fwd->size()
 222       Klass* fwd_klass = fwd->klass_or_null();
 223       check(ShenandoahAsserts::_safe_oop, obj, fwd_klass != nullptr,
 224              "Forwardee klass pointer should not be null");
 225       check(ShenandoahAsserts::_safe_oop, obj, Metaspace::contains(fwd_klass),
 226              "Forwardee klass pointer must go to metaspace");
      check(ShenandoahAsserts::_safe_oop, obj, obj_klass == fwd_klass,
             "Forwardee klass pointer must match the object klass pointer");
 229 
 230       fwd_reg = _heap->heap_region_containing(fwd);
 231 
 232       check(ShenandoahAsserts::_safe_oop, obj, fwd_reg->is_active(),
 233             "Forwardee should be in active region");
 234 
 235       // Verify that forwardee is not in the dead space:
 236       check(ShenandoahAsserts::_safe_oop, obj, !fwd_reg->is_humongous(),
 237              "Should have no humongous forwardees");
 238 
 239       HeapWord *fwd_addr = cast_from_oop<HeapWord *>(fwd);
 240       check(ShenandoahAsserts::_safe_oop, obj, fwd_addr < fwd_reg->top(),
 241              "Forwardee start should be within the region");
 242       check(ShenandoahAsserts::_safe_oop, obj, (fwd_addr + ShenandoahForwarding::size(fwd)) <= fwd_reg->top(),
 243              "Forwardee end should be within the region");
 244 
 245       oop fwd2 = ShenandoahForwarding::get_forwardee_raw_unchecked(fwd);
 246       check(ShenandoahAsserts::_safe_oop, obj, (fwd == fwd2),
 247              "Double forwarding");
 248     } else {
 249       fwd_reg = obj_reg;
 250     }
 251 
    // Do additional checks for special objects: their fields can hold metadata as well.
    // We want to check that class loading/unloading did not corrupt them. We can only
    // reasonably trust the forwarded objects, as the from-space copy may reference
    // klasses that are effectively dead.
 256 
 257     if (obj_klass == vmClasses::Class_klass()) {
 258       const Metadata* klass = fwd->metadata_field(java_lang_Class::klass_offset());
 259       check(ShenandoahAsserts::_safe_oop, obj,
 260             klass == nullptr || Metaspace::contains(klass),
 261             "Mirrored instance class should point to Metaspace");
 262 
 263       const Metadata* array_klass = obj->metadata_field(java_lang_Class::array_klass_offset());
 264       check(ShenandoahAsserts::_safe_oop, obj,
 265             array_klass == nullptr || Metaspace::contains(array_klass),
 266             "Mirrored array class should point to Metaspace");
 267     }
 268 
 269     // ------------ obj and fwd are safe at this point --------------
 270     switch (_options._verify_marked) {
 271       case ShenandoahVerifier::_verify_marked_disable:
 272         // skip
 273         break;
 274       case ShenandoahVerifier::_verify_marked_incomplete:
 275         check(ShenandoahAsserts::_safe_all, obj, _heap->marking_context()->is_marked(obj),
 276                "Must be marked in incomplete bitmap");
 277         break;
 278       case ShenandoahVerifier::_verify_marked_complete:
 279         check(ShenandoahAsserts::_safe_all, obj, _heap->gc_generation()->complete_marking_context()->is_marked(obj),
 280                "Must be marked in complete bitmap");
 281         break;
 282       case ShenandoahVerifier::_verify_marked_complete_except_references:
 283       case ShenandoahVerifier::_verify_marked_complete_satb_empty:
 284         check(ShenandoahAsserts::_safe_all, obj, _heap->gc_generation()->complete_marking_context()->is_marked(obj),
 285               "Must be marked in complete bitmap, except j.l.r.Reference referents");
 286         break;
 287       default:
 288         assert(false, "Unhandled mark verification");
 289     }
 290 
 291     switch (_options._verify_forwarded) {
 292       case ShenandoahVerifier::_verify_forwarded_disable:
 293         // skip
 294         break;
 295       case ShenandoahVerifier::_verify_forwarded_none: {
 296         check(ShenandoahAsserts::_safe_all, obj, (obj == fwd),
 297                "Should not be forwarded");
 298         break;
 299       }
 300       case ShenandoahVerifier::_verify_forwarded_allow: {
 301         if (obj != fwd) {
 302           check(ShenandoahAsserts::_safe_all, obj, obj_reg != fwd_reg,
 303                  "Forwardee should be in another region");
 304         }
 305         break;
 306       }
 307       default:
 308         assert(false, "Unhandled forwarding verification");
 309     }
 310 
 311     switch (_options._verify_cset) {
 312       case ShenandoahVerifier::_verify_cset_disable:
 313         // skip
 314         break;
 315       case ShenandoahVerifier::_verify_cset_none:
 316         check(ShenandoahAsserts::_safe_all, obj, !_heap->in_collection_set(obj),
 317                "Should not have references to collection set");
 318         break;
 319       case ShenandoahVerifier::_verify_cset_forwarded:
 320         if (_heap->in_collection_set(obj)) {
 321           check(ShenandoahAsserts::_safe_all, obj, (obj != fwd),
 322                  "Object in collection set, should have forwardee");
 323         }
 324         break;
 325       default:
 326         assert(false, "Unhandled cset verification");
 327     }
 328 
 329   }
 330 
 331 public:
  /**
   * Verify an object with a known interior reference.
   * @param p interior location from which the object is referenced; can be off-heap
   * @param obj verified object
   */
 337   template <class T>
 338   void verify_oop_at(T* p, oop obj) {
 339     _interior_loc = p;
 340     verify_oop(obj);
 341     _interior_loc = nullptr;
 342   }
 343 
  /**
   * Verify an object without a known interior reference.
   * Useful when picking up an object at a known offset in the heap,
   * but without knowing which objects reference it.
   * @param obj verified object
   */
 350   void verify_oop_standalone(oop obj) {
 351     _interior_loc = nullptr;
 352     verify_oop(obj);
 353     _interior_loc = nullptr;
 354   }
 355 
 356   /**
 357    * Verify oop fields from this object.
 358    * @param obj host object for verified fields
 359    */
 360   void verify_oops_from(oop obj) {
 361     _loc = obj;
    // oop_iterate() cannot deal with forwarded objects, because it needs to load
    // klass(), which may be overridden by the forwarding pointer.
 365     oop fwd = ShenandoahForwarding::get_forwardee_raw(obj);
 366     fwd->oop_iterate(this);
 367     _loc = nullptr;
 368   }
 369 
 370   void do_oop(oop* p) override { do_oop_work(p); }
 371   void do_oop(narrowOop* p) override { do_oop_work(p); }
 372 };
 373 
 374 // This closure computes the amounts of used, committed, and garbage memory and the number of regions contained within
 375 // a subset (e.g. the young generation or old generation) of the total heap.
 376 class ShenandoahCalculateRegionStatsClosure : public ShenandoahHeapRegionClosure {
 377 private:
 378   size_t _used, _committed, _garbage, _regions, _humongous_waste, _trashed_regions;
 379 public:
 380   ShenandoahCalculateRegionStatsClosure() :
 381       _used(0), _committed(0), _garbage(0), _regions(0), _humongous_waste(0), _trashed_regions(0) {};
 382 
 383   void heap_region_do(ShenandoahHeapRegion* r) override {
 384     _used += r->used();
 385     _garbage += r->garbage();
 386     _committed += r->is_committed() ? ShenandoahHeapRegion::region_size_bytes() : 0;
 387     if (r->is_humongous()) {
 388       _humongous_waste += r->free();
 389     }
 390     if (r->is_trash()) {
 391       _trashed_regions++;
 392     }
 393     _regions++;
 394     log_debug(gc)("ShenandoahCalculateRegionStatsClosure: adding %zu for %s Region %zu, yielding: %zu",
 395             r->used(), (r->is_humongous() ? "humongous" : "regular"), r->index(), _used);
 396   }
 397 
 398   size_t used() const { return _used; }
 399   size_t committed() const { return _committed; }
 400   size_t garbage() const { return _garbage; }
 401   size_t regions() const { return _regions; }
 402   size_t waste() const { return _humongous_waste; }
 403 
  // span is the total memory affiliated with these stats (some of which is in use and some of which is still available)
 405   size_t span() const { return _regions * ShenandoahHeapRegion::region_size_bytes(); }
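  // Same as span(), but excluding regions that have already been trashed.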
 406   size_t non_trashed_span() const { return (_regions - _trashed_regions) * ShenandoahHeapRegion::region_size_bytes(); }
 407 };
 408 
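// Accumulates region statistics separately for the young and old generations, plus a
// combined global view. FREE (unaffiliated) regions are skipped.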
 409 class ShenandoahGenerationStatsClosure : public ShenandoahHeapRegionClosure {
 410  public:
 411   ShenandoahCalculateRegionStatsClosure old;
 412   ShenandoahCalculateRegionStatsClosure young;
 413   ShenandoahCalculateRegionStatsClosure global;
 414 
 415   void heap_region_do(ShenandoahHeapRegion* r) override {
 416     switch (r->affiliation()) {
 417       case FREE:
 418         return;
 419       case YOUNG_GENERATION:
 420         young.heap_region_do(r);
 421         global.heap_region_do(r);
 422         break;
 423       case OLD_GENERATION:
 424         old.heap_region_do(r);
 425         global.heap_region_do(r);
 426         break;
 427       default:
 428         ShouldNotReachHere();
 429     }
 430   }
 431 
 432   static void log_usage(ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
 433     log_debug(gc)("Safepoint verification: %s verified usage: %zu%s, recorded usage: %zu%s",
 434                   generation->name(),
 435                   byte_size_in_proper_unit(generation->used()), proper_unit_for_byte_size(generation->used()),
 436                   byte_size_in_proper_unit(stats.used()),       proper_unit_for_byte_size(stats.used()));
 437   }
 438 
 439   static void validate_usage(const bool adjust_for_padding,
 440                              const char* label, ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
 441     ShenandoahHeap* heap = ShenandoahHeap::heap();
 442     size_t generation_used = generation->used();
 443     size_t generation_used_regions = generation->used_regions();
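    // Promote-in-place padding is tracked by the old generation; add it to the young/global
    // usage before comparing against the region-based tally below.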
 444     if (adjust_for_padding && (generation->is_young() || generation->is_global())) {
 445       size_t pad = heap->old_generation()->get_pad_for_promote_in_place();
 446       generation_used += pad;
 447     }
 448 
 449     guarantee(stats.used() == generation_used,
 450               "%s: generation (%s) used size must be consistent: generation-used: " PROPERFMT ", regions-used: " PROPERFMT,
 451               label, generation->name(), PROPERFMTARGS(generation_used), PROPERFMTARGS(stats.used()));
 452 
 453     guarantee(stats.regions() == generation_used_regions,
 454               "%s: generation (%s) used regions (%zu) must equal regions that are in use (%zu)",
 455               label, generation->name(), generation->used_regions(), stats.regions());
 456 
 457     size_t generation_capacity = generation->max_capacity();
 458     guarantee(stats.non_trashed_span() <= generation_capacity,
 459               "%s: generation (%s) size spanned by regions (%zu) * region size (" PROPERFMT
 460               ") must not exceed current capacity (" PROPERFMT ")",
 461               label, generation->name(), stats.regions(), PROPERFMTARGS(ShenandoahHeapRegion::region_size_bytes()),
 462               PROPERFMTARGS(generation_capacity));
 463 
 464     size_t humongous_waste = generation->get_humongous_waste();
 465     guarantee(stats.waste() == humongous_waste,
 466               "%s: generation (%s) humongous waste must be consistent: generation: " PROPERFMT ", regions: " PROPERFMT,
 467               label, generation->name(), PROPERFMTARGS(humongous_waste), PROPERFMTARGS(stats.waste()));
 468   }
 469 };
 470 
 471 class ShenandoahVerifyHeapRegionClosure : public ShenandoahHeapRegionClosure {
 472 private:
 473   ShenandoahHeap* _heap;
 474   const char* _phase;
 475   ShenandoahVerifier::VerifyRegions _regions;
 476 public:
 477   ShenandoahVerifyHeapRegionClosure(const char* phase, ShenandoahVerifier::VerifyRegions regions) :
 478     _heap(ShenandoahHeap::heap()),
 479     _phase(phase),
 480     _regions(regions) {};
 481 
 482   void print_failure(ShenandoahHeapRegion* r, const char* label) {
 483     ResourceMark rm;
 484 
 485     ShenandoahMessageBuffer msg("Shenandoah verification failed; %s: %s\n\n", _phase, label);
 486 
 487     stringStream ss;
 488     r->print_on(&ss);
 489     msg.append("%s", ss.as_string());
 490 
 491     report_vm_error(__FILE__, __LINE__, msg.buffer());
 492   }
 493 
 494   void verify(ShenandoahHeapRegion* r, bool test, const char* msg) {
 495     if (!test) {
 496       print_failure(r, msg);
 497     }
 498   }
 499 
 500   void heap_region_do(ShenandoahHeapRegion* r) override {
 501     switch (_regions) {
 502       case ShenandoahVerifier::_verify_regions_disable:
 503         break;
 504       case ShenandoahVerifier::_verify_regions_notrash:
 505         verify(r, !r->is_trash(),
 506                "Should not have trash regions");
 507         break;
 508       case ShenandoahVerifier::_verify_regions_nocset:
 509         verify(r, !r->is_cset(),
 510                "Should not have cset regions");
 511         break;
 512       case ShenandoahVerifier::_verify_regions_notrash_nocset:
 513         verify(r, !r->is_trash(),
 514                "Should not have trash regions");
 515         verify(r, !r->is_cset(),
 516                "Should not have cset regions");
 517         break;
 518       default:
 519         ShouldNotReachHere();
 520     }
 521 
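    // Basic region invariants that must hold in every verification mode: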
 522     verify(r, r->capacity() == ShenandoahHeapRegion::region_size_bytes(),
 523            "Capacity should match region size");
 524 
 525     verify(r, r->bottom() <= r->top(),
 526            "Region top should not be less than bottom");
 527 
 528     verify(r, r->bottom() <= _heap->marking_context()->top_at_mark_start(r),
 529            "Region TAMS should not be less than bottom");
 530 
 531     verify(r, _heap->marking_context()->top_at_mark_start(r) <= r->top(),
 532            "Complete TAMS should not be larger than top");
 533 
 534     verify(r, r->get_live_data_bytes() <= r->capacity(),
 535            "Live data cannot be larger than capacity");
 536 
 537     verify(r, r->garbage() <= r->capacity(),
 538            "Garbage cannot be larger than capacity");
 539 
 540     verify(r, r->used() <= r->capacity(),
 541            "Used cannot be larger than capacity");
 542 
 543     verify(r, r->get_shared_allocs() <= r->capacity(),
 544            "Shared alloc count should not be larger than capacity");
 545 
 546     verify(r, r->get_tlab_allocs() <= r->capacity(),
 547            "TLAB alloc count should not be larger than capacity");
 548 
 549     verify(r, r->get_gclab_allocs() <= r->capacity(),
 550            "GCLAB alloc count should not be larger than capacity");
 551 
 552     verify(r, r->get_plab_allocs() <= r->capacity(),
 553            "PLAB alloc count should not be larger than capacity");
 554 
 555     verify(r, r->get_shared_allocs() + r->get_tlab_allocs() + r->get_gclab_allocs() + r->get_plab_allocs() == r->used(),
 556            "Accurate accounting: shared + TLAB + GCLAB + PLAB = used");
 557 
 558     verify(r, !r->is_empty() || !r->has_live(),
 559            "Empty regions should not have live data");
 560 
 561     verify(r, r->is_cset() == _heap->collection_set()->is_in(r),
 562            "Transitional: region flags and collection set agree");
 563   }
 564 };
 565 
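// Walks the object graph from the roots and verifies every reachable object. The shared
// verification bitmap serves as the "visited" set across workers.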
 566 class ShenandoahVerifierReachableTask : public WorkerTask {
 567 private:
 568   const char* _label;
 569   ShenandoahVerifier::VerifyOptions _options;
 570   ShenandoahHeap* _heap;
 571   ShenandoahLivenessData* _ld;
 572   MarkBitMap* _bitmap;
 573   volatile size_t _processed;
 574 
 575 public:
 576   ShenandoahVerifierReachableTask(MarkBitMap* bitmap,
 577                                   ShenandoahLivenessData* ld,
 578                                   const char* label,
 579                                   ShenandoahVerifier::VerifyOptions options) :
 580     WorkerTask("Shenandoah Verifier Reachable Objects"),
 581     _label(label),
 582     _options(options),
 583     _heap(ShenandoahHeap::heap()),
 584     _ld(ld),
 585     _bitmap(bitmap),
 586     _processed(0) {};
 587 
 588   size_t processed() const {
 589     return _processed;
 590   }
 591 
 592   void work(uint worker_id) override {
 593     ResourceMark rm;
 594     ShenandoahVerifierStack stack;
 595 
    // On level 2, we only need to check the roots once.
    // On level 3, we want to check the roots and also seed the local stack.
    // Accepting multiple root scans at level 3 is the lesser evil, because the extra
    // parallelism buys back the cost.
 600     if (((ShenandoahVerifyLevel == 2) && (worker_id == 0))
 601         || (ShenandoahVerifyLevel >= 3)) {
 602         ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
 603                                       ShenandoahMessageBuffer("%s, Roots", _label),
 604                                       _options);
 605         if (_heap->unload_classes()) {
 606           ShenandoahRootVerifier::strong_roots_do(&cl);
 607         } else {
 608           ShenandoahRootVerifier::roots_do(&cl);
 609         }
 610     }
 611 
 612     size_t processed = 0;
 613 
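    // On level 3+, drain the local stack: each popped object was already verified when
    // it was pushed, so only its outgoing references are followed here.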
 614     if (ShenandoahVerifyLevel >= 3) {
 615       ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
 616                                     ShenandoahMessageBuffer("%s, Reachable", _label),
 617                                     _options);
 618       while (!stack.is_empty()) {
 619         processed++;
 620         ShenandoahVerifierTask task = stack.pop();
 621         cl.verify_oops_from(task.obj());
 622       }
 623     }
 624 
 625     Atomic::add(&_processed, processed, memory_order_relaxed);
 626   }
 627 };
 628 
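// Checks that every thread's SATB buffer has been flushed by the end of marking.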
 629 class ShenandoahVerifyNoIncompleteSatbBuffers : public ThreadClosure {
 630 public:
 631   void do_thread(Thread* thread) override {
 632     SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
 633     if (!queue.is_empty()) {
 634       fatal("All SATB buffers should have been flushed during mark");
 635     }
 636   }
 637 };
 638 
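// Walks marked objects region by region: bitmap-marked objects below TAMS and all objects
// allocated above TAMS. These may include objects that are unreachable from the roots.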
 639 class ShenandoahVerifierMarkedRegionTask : public WorkerTask {
 640 private:
 641   const char* _label;
 642   ShenandoahVerifier::VerifyOptions _options;
 643   ShenandoahHeap *_heap;
 644   MarkBitMap* _bitmap;
 645   ShenandoahLivenessData* _ld;
 646   volatile size_t _claimed;
 647   volatile size_t _processed;
 648   ShenandoahGeneration* _generation;
 649 
 650 public:
 651   ShenandoahVerifierMarkedRegionTask(MarkBitMap* bitmap,
 652                                      ShenandoahLivenessData* ld,
 653                                      const char* label,
 654                                      ShenandoahVerifier::VerifyOptions options) :
 655           WorkerTask("Shenandoah Verifier Marked Objects"),
 656           _label(label),
 657           _options(options),
 658           _heap(ShenandoahHeap::heap()),
 659           _bitmap(bitmap),
 660           _ld(ld),
 661           _claimed(0),
 662           _processed(0),
 663           _generation(nullptr) {
 664     if (_heap->mode()->is_generational()) {
 665       _generation = _heap->gc_generation();
 666       assert(_generation != nullptr, "Expected active generation in this mode.");
 667       shenandoah_assert_generations_reconciled();
 668     }
 669   };
 670 
 671   size_t processed() {
 672     return Atomic::load(&_processed);
 673   }
 674 
 675   void work(uint worker_id) override {
 676     if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
 677       ShenandoahVerifyNoIncompleteSatbBuffers verify_satb;
 678       Threads::threads_do(&verify_satb);
 679     }
 680 
 681     ShenandoahVerifierStack stack;
 682     ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
 683                                   ShenandoahMessageBuffer("%s, Marked", _label),
 684                                   _options);
 685 
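    // Claim regions with an atomic counter. Regions outside the active generation, trash
    // regions, and humongous continuations are skipped; humongous objects are processed
    // through their start region.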
 686     while (true) {
 687       size_t v = Atomic::fetch_then_add(&_claimed, 1u, memory_order_relaxed);
 688       if (v < _heap->num_regions()) {
 689         ShenandoahHeapRegion* r = _heap->get_region(v);
 690         if (!in_generation(r)) {
 691           continue;
 692         }
 693 
 694         if (!r->is_humongous() && !r->is_trash()) {
 695           work_regular(r, stack, cl);
 696         } else if (r->is_humongous_start()) {
 697           work_humongous(r, stack, cl);
 698         }
 699       } else {
 700         break;
 701       }
 702     }
 703   }
 704 
 705   bool in_generation(ShenandoahHeapRegion* r) {
 706     return _generation == nullptr || _generation->contains(r);
 707   }
 708 
 709   virtual void work_humongous(ShenandoahHeapRegion *r, ShenandoahVerifierStack& stack, ShenandoahVerifyOopClosure& cl) {
 710     size_t processed = 0;
 711     HeapWord* obj = r->bottom();
 712     if (_heap->gc_generation()->complete_marking_context()->is_marked(cast_to_oop(obj))) {
 713       verify_and_follow(obj, stack, cl, &processed);
 714     }
 715     Atomic::add(&_processed, processed, memory_order_relaxed);
 716   }
 717 
 718   virtual void work_regular(ShenandoahHeapRegion *r, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl) {
 719     size_t processed = 0;
 720     ShenandoahMarkingContext* ctx = _heap->gc_generation()->complete_marking_context();
 721     HeapWord* tams = ctx->top_at_mark_start(r);
 722 
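    // Objects below TAMS are located via the mark bitmap; objects at or above TAMS were
    // allocated after mark start and are walked linearly by their size.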
 723     // Bitmaps, before TAMS
 724     if (tams > r->bottom()) {
 725       HeapWord* start = r->bottom();
 726       HeapWord* addr = ctx->get_next_marked_addr(start, tams);
 727 
 728       while (addr < tams) {
 729         verify_and_follow(addr, stack, cl, &processed);
 730         addr += 1;
 731         if (addr < tams) {
 732           addr = ctx->get_next_marked_addr(addr, tams);
 733         }
 734       }
 735     }
 736 
 737     // Size-based, after TAMS
 738     {
 739       HeapWord* limit = r->top();
 740       HeapWord* addr = tams;
 741 
 742       while (addr < limit) {
 743         verify_and_follow(addr, stack, cl, &processed);
 744         addr += ShenandoahForwarding::size(cast_to_oop(addr));
 745       }
 746     }
 747 
 748     Atomic::add(&_processed, processed, memory_order_relaxed);
 749   }
 750 
 751   void verify_and_follow(HeapWord *addr, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl, size_t *processed) {
 752     if (!_bitmap->par_mark(addr)) return;
 753 
 754     // Verify the object itself:
 755     oop obj = cast_to_oop(addr);
 756     cl.verify_oop_standalone(obj);
 757 
 758     // Verify everything reachable from that object too, hopefully realizing
 759     // everything was already marked, and never touching further:
 760     if (!is_instance_ref_klass(ShenandoahForwarding::klass(obj))) {
 761       cl.verify_oops_from(obj);
 762       (*processed)++;
 763     }
 764     while (!stack.is_empty()) {
 765       ShenandoahVerifierTask task = stack.pop();
 766       cl.verify_oops_from(task.obj());
 767       (*processed)++;
 768     }
 769   }
 770 };
 771 
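// Verifies that each thread's cached gc-state matches the expected global state. In
// generational mode the marking bits are masked out, since old-generation marking is
// allowed in any state.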
 772 class VerifyThreadGCState : public ThreadClosure {
 773 private:
 774   const char* const _label;
 775          char const _expected;
 776 
 777 public:
 778   VerifyThreadGCState(const char* label, char expected) : _label(label), _expected(expected) {}
 779   void do_thread(Thread* t) override {
 780     char actual = ShenandoahThreadLocalData::gc_state(t);
 781     if (!verify_gc_state(actual, _expected)) {
 782       fatal("%s: Thread %s: expected gc-state %d, actual %d", _label, t->name(), _expected, actual);
 783     }
 784   }
 785 
 786   static bool verify_gc_state(char actual, char expected) {
 787     // Old generation marking is allowed in all states.
 788     if (ShenandoahHeap::heap()->mode()->is_generational()) {
 789       return ((actual & ~(ShenandoahHeap::OLD_MARKING | ShenandoahHeap::MARKING)) == expected);
 790     } else {
 791       assert((actual & ShenandoahHeap::OLD_MARKING) == 0, "Should not mark old in non-generational mode");
 792       return (actual == expected);
 793     }
 794   }
 795 };
 796 
 797 void ShenandoahVerifier::verify_at_safepoint(const char* label,
 798                                              VerifyRememberedSet remembered,
 799                                              VerifyForwarded forwarded,
 800                                              VerifyMarked marked,
 801                                              VerifyCollectionSet cset,
 802                                              VerifyLiveness liveness,
 803                                              VerifyRegions regions,
 804                                              VerifySize sizeness,
 805                                              VerifyGCState gcstate) {
 806   guarantee(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "only when nothing else happens");
 807   guarantee(ShenandoahVerify, "only when enabled, and bitmap is initialized in ShenandoahHeap::initialize");
 808 
 809   ShenandoahHeap::heap()->propagate_gc_state_to_all_threads();
 810 
 811   // Avoid side-effect of changing workers' active thread count, but bypass concurrent/parallel protocol check
 812   ShenandoahPushWorkerScope verify_worker_scope(_heap->workers(), _heap->max_workers(), false /*bypass check*/);
 813 
 814   log_info(gc,start)("Verify %s, Level %zd", label, ShenandoahVerifyLevel);
 815 
 816   // GC state checks
 817   {
 818     char expected = -1;
 819     bool enabled;
 820     switch (gcstate) {
 821       case _verify_gcstate_disable:
 822         enabled = false;
 823         break;
 824       case _verify_gcstate_forwarded:
 825         enabled = true;
 826         expected = ShenandoahHeap::HAS_FORWARDED;
 827         break;
 828       case _verify_gcstate_updating:
 829         enabled = true;
 830         expected = ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::UPDATE_REFS;
 831         break;
 832       case _verify_gcstate_stable:
 833         enabled = true;
 834         expected = ShenandoahHeap::STABLE;
 835         break;
 836       case _verify_gcstate_stable_weakroots:
 837         enabled = true;
 838         expected = ShenandoahHeap::STABLE;
 839         if (!_heap->is_stw_gc_in_progress()) {
 840           // Only concurrent GC sets this.
 841           expected |= ShenandoahHeap::WEAK_ROOTS;
 842         }
 843         break;
 844       default:
 845         enabled = false;
 846         assert(false, "Unhandled gc-state verification");
 847     }
 848 
 849     if (enabled) {
 850       char actual = _heap->gc_state();
 851 
 852       bool is_marking = (actual & ShenandoahHeap::MARKING);
 853       bool is_marking_young_or_old = (actual & (ShenandoahHeap::YOUNG_MARKING | ShenandoahHeap::OLD_MARKING));
 854       assert(is_marking == is_marking_young_or_old, "MARKING iff (YOUNG_MARKING or OLD_MARKING), gc_state is: %x", actual);
 855 
 856       // Old generation marking is allowed in all states.
 857       if (!VerifyThreadGCState::verify_gc_state(actual, expected)) {
 858         fatal("%s: Global gc-state: expected %d, actual %d", label, expected, actual);
 859       }
 860 
 861       VerifyThreadGCState vtgcs(label, expected);
 862       Threads::java_threads_do(&vtgcs);
 863     }
 864   }
 865 
 866   // Deactivate barriers temporarily: Verifier wants plain heap accesses
 867   ShenandoahGCStateResetter resetter;
 868 
 869   // Heap size checks
 870   {
 871     ShenandoahHeapLocker lock(_heap->lock());
 872 
 873     ShenandoahCalculateRegionStatsClosure cl;
 874     _heap->heap_region_iterate(&cl);
 875     size_t heap_used;
 876     if (_heap->mode()->is_generational() && (sizeness == _verify_size_adjusted_for_padding)) {
 877       // Prior to evacuation, regular regions that are to be evacuated in place are padded to prevent further allocations
 878       heap_used = _heap->used() + _heap->old_generation()->get_pad_for_promote_in_place();
 879     } else if (sizeness != _verify_size_disable) {
 880       heap_used = _heap->used();
 881     }
 882     if (sizeness != _verify_size_disable) {
 883       guarantee(cl.used() == heap_used,
 884                 "%s: heap used size must be consistent: heap-used = %zu%s, regions-used = %zu%s",
 885                 label,
 886                 byte_size_in_proper_unit(heap_used), proper_unit_for_byte_size(heap_used),
 887                 byte_size_in_proper_unit(cl.used()), proper_unit_for_byte_size(cl.used()));
 888     }
 889     size_t heap_committed = _heap->committed();
 890     guarantee(cl.committed() == heap_committed,
 891               "%s: heap committed size must be consistent: heap-committed = %zu%s, regions-committed = %zu%s",
 892               label,
 893               byte_size_in_proper_unit(heap_committed), proper_unit_for_byte_size(heap_committed),
 894               byte_size_in_proper_unit(cl.committed()), proper_unit_for_byte_size(cl.committed()));
 895   }
 896 
 897   log_debug(gc)("Safepoint verification finished heap usage verification");
 898 
 899   ShenandoahGeneration* generation;
 900   if (_heap->mode()->is_generational()) {
 901     generation = _heap->gc_generation();
 902     guarantee(generation != nullptr, "Need to know which generation to verify.");
 903     shenandoah_assert_generations_reconciled();
 904   } else {
 905     generation = nullptr;
 906   }
 907 
 908   if (generation != nullptr) {
 909     ShenandoahHeapLocker lock(_heap->lock());
 910 
 911     switch (remembered) {
 912       case _verify_remembered_disable:
 913         break;
 914       case _verify_remembered_before_marking:
 915         log_debug(gc)("Safepoint verification of remembered set at mark");
 916         verify_rem_set_before_mark();
 917         break;
 918       case _verify_remembered_before_updating_references:
 919         log_debug(gc)("Safepoint verification of remembered set at update ref");
 920         verify_rem_set_before_update_ref();
 921         break;
 922       case _verify_remembered_after_full_gc:
 923         log_debug(gc)("Safepoint verification of remembered set after full gc");
 924         verify_rem_set_after_full_gc();
 925         break;
 926       default:
 927         fatal("Unhandled remembered set verification mode");
 928     }
 929 
 930     ShenandoahGenerationStatsClosure cl;
 931     _heap->heap_region_iterate(&cl);
 932 
 933     if (LogTarget(Debug, gc)::is_enabled()) {
 934       ShenandoahGenerationStatsClosure::log_usage(_heap->old_generation(),    cl.old);
 935       ShenandoahGenerationStatsClosure::log_usage(_heap->young_generation(),  cl.young);
 936       ShenandoahGenerationStatsClosure::log_usage(_heap->global_generation(), cl.global);
 937     }
 938     if (sizeness == _verify_size_adjusted_for_padding) {
 939       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
 940       ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->young_generation(), cl.young);
 941       ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->global_generation(), cl.global);
 942     } else if (sizeness == _verify_size_exact) {
 943       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
 944       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->young_generation(), cl.young);
 945       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->global_generation(), cl.global);
 946     }
 947     // else: sizeness must equal _verify_size_disable
 948   }
 949 
 950   log_debug(gc)("Safepoint verification finished remembered set verification");
 951 
 952   // Internal heap region checks
 953   if (ShenandoahVerifyLevel >= 1) {
 954     ShenandoahVerifyHeapRegionClosure cl(label, regions);
 955     if (generation != nullptr) {
 956       generation->heap_region_iterate(&cl);
 957     } else {
 958       _heap->heap_region_iterate(&cl);
 959     }
 960   }
 961 
 962   log_debug(gc)("Safepoint verification finished heap region closure verification");
 963 
 964   OrderAccess::fence();
 965 
 966   if (UseTLAB) {
 967     _heap->labs_make_parsable();
 968   }
 969 
  // Reset the temporary bitmap used for storing the marking wavefront:
 971   _verification_bit_map->clear();
 972 
 973   // Allocate temporary array for storing liveness data
 974   ShenandoahLivenessData* ld = NEW_C_HEAP_ARRAY(ShenandoahLivenessData, _heap->num_regions(), mtGC);
 975   Copy::fill_to_bytes((void*)ld, _heap->num_regions()*sizeof(ShenandoahLivenessData), 0);
 976 
 977   const VerifyOptions& options = ShenandoahVerifier::VerifyOptions(forwarded, marked, cset, liveness, regions, gcstate);
 978 
  // Steps 1-2. Scan the root set to get the initial reachable set, then finish walking the
  // reachable heap. This verifies what the application can see, since it only cares about
  // reachable objects.
 981   size_t count_reachable = 0;
 982   if (ShenandoahVerifyLevel >= 2) {
 983     ShenandoahVerifierReachableTask task(_verification_bit_map, ld, label, options);
 984     _heap->workers()->run_task(&task);
 985     count_reachable = task.processed();
 986   }
 987 
 988   log_debug(gc)("Safepoint verification finished getting initial reachable set");
 989 
  // Step 3. Walk marked objects. Marked objects might be unreachable. This verifies what the
  // collector, not the application, can see during region scans. There is no reason to process
  // objects that were already verified, e.g. those marked in the verification bitmap. There is
  // an interaction with TAMS: before TAMS, we verify the bitmaps, if available; after TAMS, we
  // walk until top(). This mimics what marked_object_iterate is doing, without calling into
  // that optimized (and possibly incorrect) version.
 996 
 997   size_t count_marked = 0;
 998   if (ShenandoahVerifyLevel >= 4 &&
 999         (marked == _verify_marked_complete ||
1000          marked == _verify_marked_complete_except_references ||
1001          marked == _verify_marked_complete_satb_empty)) {
1002     guarantee(_heap->gc_generation()->is_mark_complete(), "Marking context should be complete");
1003     ShenandoahVerifierMarkedRegionTask task(_verification_bit_map, ld, label, options);
1004     _heap->workers()->run_task(&task);
1005     count_marked = task.processed();
1006   } else {
1007     guarantee(ShenandoahVerifyLevel < 4 || marked == _verify_marked_incomplete || marked == _verify_marked_disable, "Should be");
1008   }
1009 
1010   log_debug(gc)("Safepoint verification finished walking marked objects");
1011 
1012   // Step 4. Verify accumulated liveness data, if needed. Only reliable if verification level includes
1013   // marked objects.
1014 
1015   if (ShenandoahVerifyLevel >= 4 && marked == _verify_marked_complete && liveness == _verify_liveness_complete) {
1016     for (size_t i = 0; i < _heap->num_regions(); i++) {
1017       ShenandoahHeapRegion* r = _heap->get_region(i);
1018       if (generation != nullptr && !generation->contains(r)) {
1019         continue;
1020       }
1021 
1022       juint verf_live = 0;
      if (r->is_humongous()) {
        // For humongous objects, test whether the start region is marked live; if so, all
        // humongous regions in that chain have live data equal to their "used".
1026         juint start_live = Atomic::load(&ld[r->humongous_start_region()->index()]);
1027         if (start_live > 0) {
1028           verf_live = (juint)(r->used() / HeapWordSize);
1029         }
1030       } else {
1031         verf_live = Atomic::load(&ld[r->index()]);
1032       }
1033 
1034       size_t reg_live = r->get_live_data_words();
1035       if (reg_live != verf_live) {
1036         stringStream ss;
1037         r->print_on(&ss);
1038         fatal("%s: Live data should match: region-live = %zu, verifier-live = " UINT32_FORMAT "\n%s",
1039               label, reg_live, verf_live, ss.freeze());
1040       }
1041     }
1042   }
1043 
1044   log_debug(gc)("Safepoint verification finished accumulation of liveness data");
1045 
1047   log_info(gc)("Verify %s, Level %zd (%zu reachable, %zu marked)",
1048                label, ShenandoahVerifyLevel, count_reachable, count_marked);
1049 
1050   FREE_C_HEAP_ARRAY(ShenandoahLivenessData, ld);
1051 }
1052 
1053 void ShenandoahVerifier::verify_generic(VerifyOption vo) {
1054   verify_at_safepoint(
1055           "Generic Verification",
1056           _verify_remembered_disable,  // do not verify remembered set
1057           _verify_forwarded_allow,     // conservatively allow forwarded
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
1059           _verify_cset_disable,        // cset may be inconsistent
1060           _verify_liveness_disable,    // no reliable liveness data
1061           _verify_regions_disable,     // no reliable region data
1062           _verify_size_exact,          // expect generation and heap sizes to match exactly
1063           _verify_gcstate_disable      // no data about gcstate
1064   );
1065 }
1066 
1067 void ShenandoahVerifier::verify_before_concmark() {
1068   VerifyRememberedSet verify_remembered_set = _verify_remembered_before_marking;
1069   if (_heap->mode()->is_generational() &&
1070       !_heap->old_generation()->is_mark_complete()) {
    // Before marking in generational mode, the remembered set cannot be verified
    // without complete old-generation marking.
1072     verify_remembered_set = _verify_remembered_disable;
1073   }
1074   verify_at_safepoint(
1075           "Before Mark",
1076           verify_remembered_set,
1077                                        // verify read-only remembered set from bottom() to top()
1078           _verify_forwarded_none,      // UR should have fixed up
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
1080           _verify_cset_none,           // UR should have fixed this
1081           _verify_liveness_disable,    // no reliable liveness data
1082           _verify_regions_notrash,     // no trash regions
1083           _verify_size_exact,          // expect generation and heap sizes to match exactly
1084           _verify_gcstate_stable       // there are no forwarded objects
1085   );
1086 }
1087 
1088 void ShenandoahVerifier::verify_after_concmark() {
1089   verify_at_safepoint(
1090           "After Mark",
1091           _verify_remembered_disable,         // do not verify remembered set
1092           _verify_forwarded_none,             // no forwarded references
1093           _verify_marked_complete_satb_empty, // bitmaps as precise as we can get, except dangling j.l.r.Refs
1094           _verify_cset_none,                  // no references to cset anymore
1095           _verify_liveness_complete,          // liveness data must be complete here
1096           _verify_regions_disable,            // trash regions not yet recycled
1097           _verify_size_exact,                 // expect generation and heap sizes to match exactly
1098           _verify_gcstate_stable_weakroots    // heap is still stable, weakroots are in progress
1099   );
1100 }
1101 
1102 void ShenandoahVerifier::verify_after_concmark_with_promotions() {
1103   verify_at_safepoint(
1104           "After Mark",
1105           _verify_remembered_disable,         // do not verify remembered set
1106           _verify_forwarded_none,             // no forwarded references
1107           _verify_marked_complete_satb_empty, // bitmaps as precise as we can get, except dangling j.l.r.Refs
1108           _verify_cset_none,                  // no references to cset anymore
1109           _verify_liveness_complete,          // liveness data must be complete here
1110           _verify_regions_disable,            // trash regions not yet recycled
1111           _verify_size_adjusted_for_padding,  // expect generation and heap sizes to match after adjustments
1112                                               // for promote in place padding
1113           _verify_gcstate_stable_weakroots    // heap is still stable, weakroots are in progress
1114   );
1115 }
1116 
1117 void ShenandoahVerifier::verify_before_evacuation() {
1118   verify_at_safepoint(
1119           "Before Evacuation",
1120           _verify_remembered_disable,                // do not verify remembered set
1121           _verify_forwarded_none,                    // no forwarded references
1122           _verify_marked_complete_except_references, // walk over marked objects too
1123           _verify_cset_disable,                      // non-forwarded references to cset expected
1124           _verify_liveness_complete,                 // liveness data must be complete here
1125           _verify_regions_disable,                   // trash regions not yet recycled
1126           _verify_size_adjusted_for_padding,         // expect generation and heap sizes to match after adjustments
1127                                                      //  for promote in place padding
1128           _verify_gcstate_stable_weakroots           // heap is still stable, weakroots are in progress
1129   );
1130 }
1131 
1132 void ShenandoahVerifier::verify_before_update_refs() {
1133   VerifyRememberedSet verify_remembered_set = _verify_remembered_before_updating_references;
1134   if (_heap->mode()->is_generational() &&
1135       !_heap->old_generation()->is_mark_complete()) {
1136     verify_remembered_set = _verify_remembered_disable;
1137   }
1138   verify_at_safepoint(
1139           "Before Updating References",
1140           verify_remembered_set,        // verify read-write remembered set
1141           _verify_forwarded_allow,     // forwarded references allowed
1142           _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
1143           _verify_cset_forwarded,      // all cset refs are fully forwarded
1144           _verify_liveness_disable,    // no reliable liveness data anymore
1145           _verify_regions_notrash,     // trash regions have been recycled already
1146           _verify_size_exact,          // expect generation and heap sizes to match exactly
1147           _verify_gcstate_updating     // evacuation should have produced some forwarded objects
1148   );
1149 }
1150 
// We have not yet cleaned up (reclaimed) the collection set
1152 void ShenandoahVerifier::verify_after_update_refs() {
1153   verify_at_safepoint(
1154           "After Updating References",
1155           _verify_remembered_disable,  // do not verify remembered set
1156           _verify_forwarded_none,      // no forwarded references
1157           _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
1158           _verify_cset_none,           // no cset references, all updated
1159           _verify_liveness_disable,    // no reliable liveness data anymore
1160           _verify_regions_nocset,      // no cset regions, trash regions have appeared
1161           _verify_size_exact,          // expect generation and heap sizes to match exactly
1162           _verify_gcstate_stable       // update refs had cleaned up forwarded objects
1163   );
1164 }
1165 
1166 void ShenandoahVerifier::verify_after_degenerated() {
1167   verify_at_safepoint(
1168           "After Degenerated GC",
1169           _verify_remembered_disable,  // do not verify remembered set
1170           _verify_forwarded_none,      // all objects are non-forwarded
1171           _verify_marked_complete,     // all objects are marked in complete bitmap
1172           _verify_cset_none,           // no cset references
1173           _verify_liveness_disable,    // no reliable liveness data anymore
1174           _verify_regions_notrash_nocset, // no trash, no cset
1175           _verify_size_exact,          // expect generation and heap sizes to match exactly
1176           _verify_gcstate_stable       // degenerated refs had cleaned up forwarded objects
1177   );
1178 }
1179 
1180 void ShenandoahVerifier::verify_before_fullgc() {
1181   verify_at_safepoint(
1182           "Before Full GC",
1183           _verify_remembered_disable,  // do not verify remembered set
1184           _verify_forwarded_allow,     // can have forwarded objects
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
1186           _verify_cset_disable,        // cset might be foobared
1187           _verify_liveness_disable,    // no reliable liveness data anymore
1188           _verify_regions_disable,     // no reliable region data here
1189           _verify_size_disable,        // if we degenerate during evacuation, usage not valid: padding and deferred accounting
1190           _verify_gcstate_disable      // no reliable gcstate data
1191   );
1192 }
1193 
1194 void ShenandoahVerifier::verify_after_fullgc() {
1195   verify_at_safepoint(
1196           "After Full GC",
1197           _verify_remembered_after_full_gc,  // verify read-write remembered set
1198           _verify_forwarded_none,      // all objects are non-forwarded
1199           _verify_marked_incomplete,   // all objects are marked in incomplete bitmap
1200           _verify_cset_none,           // no cset references
1201           _verify_liveness_disable,    // no reliable liveness data anymore
1202           _verify_regions_notrash_nocset, // no trash, no cset
1203           _verify_size_exact,           // expect generation and heap sizes to match exactly
1204           _verify_gcstate_stable        // full gc cleaned up everything
1205   );
1206 }
1207 
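// Root-verification closure: reports a failure if any root points at a forwarded object.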
1208 class ShenandoahVerifyNoForwarded : public BasicOopIterateClosure {
1209 private:
1210   template <class T>
1211   void do_oop_work(T* p) {
1212     T o = RawAccess<>::oop_load(p);
1213     if (!CompressedOops::is_null(o)) {
1214       oop obj = CompressedOops::decode_not_null(o);
1215       oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
1216       if (obj != fwd) {
1217         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1218                                          "Verify Roots", "Should not be forwarded", __FILE__, __LINE__);
1219       }
1220     }
1221   }
1222 
1223 public:
1224   void do_oop(narrowOop* p) { do_oop_work(p); }
1225   void do_oop(oop* p)       { do_oop_work(p); }
1226 };
1227 
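// Root-verification closure: each root target must be marked (or be in the old generation),
// must not be in the collection set, and must not be forwarded.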
1228 class ShenandoahVerifyInToSpaceClosure : public BasicOopIterateClosure {
1229 private:
1230   template <class T>
1231   void do_oop_work(T* p) {
1232     T o = RawAccess<>::oop_load(p);
1233     if (!CompressedOops::is_null(o)) {
1234       oop obj = CompressedOops::decode_not_null(o);
1235       ShenandoahHeap* heap = ShenandoahHeap::heap();
1236 
1237       if (!heap->marking_context()->is_marked_or_old(obj)) {
1238         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1239                 "Verify Roots In To-Space", "Should be marked", __FILE__, __LINE__);
1240       }
1241 
1242       if (heap->in_collection_set(obj)) {
1243         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1244                 "Verify Roots In To-Space", "Should not be in collection set", __FILE__, __LINE__);
1245       }
1246 
1247       oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
1248       if (obj != fwd) {
1249         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1250                 "Verify Roots In To-Space", "Should not be forwarded", __FILE__, __LINE__);
1251       }
1252     }
1253   }
1254 
1255 public:
1256   void do_oop(narrowOop* p) override { do_oop_work(p); }
1257   void do_oop(oop* p)       override { do_oop_work(p); }
1258 };
1259 
1260 void ShenandoahVerifier::verify_roots_in_to_space() {
1261   ShenandoahVerifyInToSpaceClosure cl;
1262   ShenandoahRootVerifier::roots_do(&cl);
1263 }
1264 
1265 void ShenandoahVerifier::verify_roots_no_forwarded() {
1266   ShenandoahVerifyNoForwarded cl;
1267   ShenandoahRootVerifier::roots_do(&cl);
1268 }
1269 
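     // Checks a single reference location against the remembered set: if the referent is in the
     // young generation, the card covering the location must be dirty in the given scanner.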
1270 template<typename Scanner>
1271 class ShenandoahVerifyRemSetClosure : public BasicOopIterateClosure {
1272 protected:
1273   ShenandoahGenerationalHeap* const _heap;
1274   Scanner*   const _scanner;
1275   const char* _message;
1276 
1277 public:
1278   // The message argument distinguishes between initial mark and start of update refs verification.
1279   explicit ShenandoahVerifyRemSetClosure(Scanner* scanner, const char* message) :
1280             _heap(ShenandoahGenerationalHeap::heap()),
1281             _scanner(scanner),
1282             _message(message) {}
1283 
1284   template<class T>
1285   inline void work(T* p) {
1286     T o = RawAccess<>::oop_load(p);
1287     if (!CompressedOops::is_null(o)) {
1288       oop obj = CompressedOops::decode_not_null(o);
1289       if (_heap->is_in_young(obj) && !_scanner->is_card_dirty((HeapWord*) p)) {
1290         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1291                                          _message, "clean card should be dirty", __FILE__, __LINE__);
1292       }
1293     }
1294   }
1295 
1296   void do_oop(narrowOop* p) override { work(p); }
1297   void do_oop(oop* p)       override { work(p); }
1298 };
1299 
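     // Walks the live objects of an old region and applies the remembered set closure to their
     // reference fields. Objects that start below registration_watermark must also be properly
     // registered with the card scanner.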
1300 template<typename Scanner>
1301 void ShenandoahVerifier::help_verify_region_rem_set(Scanner* scanner, ShenandoahHeapRegion* r,
1302                                                     HeapWord* registration_watermark, const char* message) {
1303   shenandoah_assert_generations_reconciled();
1304   ShenandoahOldGeneration* old_gen = _heap->old_generation();
1305   assert(old_gen->is_mark_complete() || old_gen->is_parsable(), "Sanity");
1306 
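       // When old marking is not complete, ctx stays null and every object below top() is treated as live.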
1307   ShenandoahMarkingContext* ctx = old_gen->is_mark_complete() ? old_gen->complete_marking_context() : nullptr;
1308   ShenandoahVerifyRemSetClosure<Scanner> check_interesting_pointers(scanner, message);
1309   HeapWord* from = r->bottom();
1310   HeapWord* obj_addr = from;
1311   if (r->is_humongous_start()) {
1312     oop obj = cast_to_oop(obj_addr);
1313     if ((ctx == nullptr) || ctx->is_marked(obj)) {
1314       // For humongous objects, the typical object is an array, so the following checks may be overkill
1315       // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1316       // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1317       if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1318         obj->oop_iterate(&check_interesting_pointers);
1319       }
1320       // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1321     }
1322     // else, this humongous object is not live so no need to verify its internal pointers
1323 
1324     if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1325       ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1326                                        "object not properly registered", __FILE__, __LINE__);
1327     }
1328   } else if (!r->is_humongous()) {
1329     HeapWord* top = r->top();
1330     while (obj_addr < top) {
1331       oop obj = cast_to_oop(obj_addr);
1332       // ctx->is_marked() returns true if mark bit set or if obj above TAMS.
1333       if ((ctx == nullptr) || ctx->is_marked(obj)) {
1334         // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1335         // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1336         if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1337           obj->oop_iterate(&check_interesting_pointers);
1338         }
1339         // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1340 
1341         if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1342           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1343                                            "object not properly registered", __FILE__, __LINE__);
1344         }
1345         obj_addr += obj->size();
1346       } else {
1347         // This object is not live so we don't verify dirty cards contained therein
1348         HeapWord* tams = ctx->top_at_mark_start(r);
1349         obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
1350       }
1351     }
1352   }
1353 }
1354 
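     // Adapter that routes is_card_dirty and verify_registration to the write card table of
     // ShenandoahScanRemembered, used by the full-GC and pre-update-refs verifications below.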
1355 class ShenandoahWriteTableScanner {
1356 private:
1357   ShenandoahScanRemembered* _scanner;
1358 public:
1359   explicit ShenandoahWriteTableScanner(ShenandoahScanRemembered* scanner) : _scanner(scanner) {}
1360 
1361   bool is_card_dirty(HeapWord* obj_addr) {
1362     return _scanner->is_write_card_dirty(obj_addr);
1363   }
1364 
1365   bool verify_registration(HeapWord* obj_addr, ShenandoahMarkingContext* ctx) {
1366     return _scanner->verify_registration(obj_addr, ctx);
1367   }
1368 };
1369 
1370 // Assure that the remembered set has a dirty card everywhere there is an interesting pointer.
1371 // This examines the read_card_table between bottom() and top() since all PLABs are retired
1372 // before the safepoint for init_mark.  Actually, we retire them before update-references and don't
1373 // restore them until the start of evacuation.
1374 void ShenandoahVerifier::verify_rem_set_before_mark() {
1375   shenandoah_assert_safepoint();
1376   shenandoah_assert_generational();
1377 
1378   ShenandoahOldGeneration* old_generation = _heap->old_generation();
1379 
1380   log_debug(gc)("Verifying remembered set at %s mark", old_generation->is_doing_mixed_evacuations() ? "mixed" : "young");
1381 
1382   ShenandoahScanRemembered* scanner = old_generation->card_scan();
1383   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1384     ShenandoahHeapRegion* r = _heap->get_region(i);
1385     if (r->is_old() && r->is_active()) {
1386       help_verify_region_rem_set(scanner, r, r->end(), "Verify init-mark remembered set violation");
1387     }
1388   }
1389 }
1390 
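     // Assure that the remembered set (write card table) is valid for every old region outside the
     // collection set at the end of Full GC, checking cards up to each region's top().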
1391 void ShenandoahVerifier::verify_rem_set_after_full_gc() {
1392   shenandoah_assert_safepoint();
1393   shenandoah_assert_generational();
1394 
1395   ShenandoahWriteTableScanner scanner(ShenandoahGenerationalHeap::heap()->old_generation()->card_scan());
1396   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1397     ShenandoahHeapRegion* r = _heap->get_region(i);
1398     if (r->is_old() && !r->is_cset()) {
1399       help_verify_region_rem_set(&scanner, r, r->top(), "Remembered set violation at end of Full GC");
1400     }
1401   }
1402 }
1403 
1404 // Assure that the remembered set has a dirty card everywhere there is an interesting pointer.  Even though
1405 // the update-references scan of the remembered set only examines cards up to update_watermark, the remembered
1406 // set should be valid through top.  This examines the write_card_table between bottom() and top() because
1407 // all PLABs are retired immediately before the start of update refs.
1408 void ShenandoahVerifier::verify_rem_set_before_update_ref() {
1409   shenandoah_assert_safepoint();
1410   shenandoah_assert_generational();
1411 
1412   ShenandoahWriteTableScanner scanner(_heap->old_generation()->card_scan());
1413   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1414     ShenandoahHeapRegion* r = _heap->get_region(i);
1415     if (r->is_old() && !r->is_cset()) {
1416       help_verify_region_rem_set(&scanner, r, r->get_update_watermark(), "Remembered set violation at init-update-references");
1417     }
1418   }
1419 }
1420 
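     // Recompute per-generation usage from a region scan and validate it against the tracked
     // generation accounting before the free set is rebuilt.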
1421 void ShenandoahVerifier::verify_before_rebuilding_free_set() {
1422   ShenandoahGenerationStatsClosure cl;
1423   _heap->heap_region_iterate(&cl);
1424 
1425   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->old_generation(), cl.old);
1426   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->young_generation(), cl.young);
1427   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->global_generation(), cl.global);
1428 }