/*
 * Copyright (c) 2017, 2021, Red Hat, Inc. All rights reserved.
 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "gc/shenandoah/shenandoahAsserts.hpp"
#include "gc/shenandoah/shenandoahForwarding.inline.hpp"
#include "gc/shenandoah/shenandoahPhaseTimings.hpp"
#include "gc/shenandoah/shenandoahGeneration.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
#include "gc/shenandoah/shenandoahOldGeneration.hpp"
#include "gc/shenandoah/shenandoahRootProcessor.hpp"
#include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
#include "gc/shenandoah/shenandoahTaskqueue.inline.hpp"
#include "gc/shenandoah/shenandoahUtils.hpp"
#include "gc/shenandoah/shenandoahVerifier.hpp"
#include "gc/shenandoah/shenandoahYoungGeneration.hpp"
#include "memory/allocation.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/compressedOops.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/threads.hpp"
#include "utilities/align.hpp"

// Avoid name collision on verify_oop (defined in macroAssembler_arm.hpp)
#ifdef verify_oop
#undef verify_oop
#endif

static bool is_instance_ref_klass(Klass* k) {
  return k->is_instance_klass() && InstanceKlass::cast(k)->reference_type() != REF_NONE;
}

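// Closure that verifies a single oop and optionally pushes it onto the verifier stack so that its
// fields are followed later. Consistency checks run at gradually decreasing safety levels: heap
// bounds and alignment first, then region state, forwarding, marking, and collection set membership,
// according to the VerifyOptions supplied by the caller.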
class ShenandoahVerifyOopClosure : public BasicOopIterateClosure {
private:
  const char* _phase;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahVerifierStack* _stack;
  ShenandoahHeap* _heap;
  MarkBitMap* _map;
  ShenandoahLivenessData* _ld;
  void* _interior_loc;
  oop _loc;
  ReferenceIterationMode _ref_mode;
  ShenandoahGeneration* _generation;

public:
  ShenandoahVerifyOopClosure(ShenandoahVerifierStack* stack, MarkBitMap* map, ShenandoahLivenessData* ld,
                             const char* phase, ShenandoahVerifier::VerifyOptions options) :
    _phase(phase),
    _options(options),
    _stack(stack),
    _heap(ShenandoahHeap::heap()),
    _map(map),
    _ld(ld),
    _interior_loc(nullptr),
    _loc(nullptr),
    _generation(nullptr) {
    if (options._verify_marked == ShenandoahVerifier::_verify_marked_complete_except_references ||
        options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty ||
        options._verify_marked == ShenandoahVerifier::_verify_marked_disable) {
      // Unknown status for Reference.referent field. Do not touch it, it might be dead.
      // Normally, barriers would prevent us from seeing the dead referents, but verifier
      // runs with barriers disabled.
      _ref_mode = DO_FIELDS_EXCEPT_REFERENT;
    } else {
      // Otherwise do all fields.
      _ref_mode = DO_FIELDS;
    }

    if (_heap->mode()->is_generational()) {
      _generation = _heap->gc_generation();
      assert(_generation != nullptr, "Expected active generation in this mode");
      shenandoah_assert_generations_reconciled();
    }
  }

  ReferenceIterationMode reference_iteration_mode() override {
    return _ref_mode;
  }

private:
  void check(ShenandoahAsserts::SafeLevel level, oop obj, bool test, const char* label) {
    if (!test) {
      ShenandoahAsserts::print_failure(level, obj, _interior_loc, _loc, _phase, label, __FILE__, __LINE__);
    }
  }

  template <class T>
  void do_oop_work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (is_instance_ref_klass(ShenandoahForwarding::klass(obj))) {
        obj = ShenandoahForwarding::get_forwardee(obj);
      }
      // Single threaded verification can use faster non-atomic stack and bitmap
      // methods.
      //
      // For performance reasons, only fully verify non-marked field values.
      // We are here when the host object for *p is already marked.
      if (in_generation(obj) && _map->par_mark(obj)) {
        verify_oop_at(p, obj);
        _stack->push(ShenandoahVerifierTask(obj));
      }
    }
  }

  bool in_generation(oop obj) {
    if (_generation == nullptr) {
      return true;
    }

    ShenandoahHeapRegion* region = _heap->heap_region_containing(obj);
    return _generation->contains(region);
  }

  void verify_oop(oop obj) {
    // Perform consistency checks with gradually decreasing safety level. This guarantees
    // that the failure report does not try to touch anything that has not yet been verified
    // to be safe to process.

    check(ShenandoahAsserts::_safe_unknown, obj, _heap->is_in_reserved(obj),
          "oop must be in heap bounds");
    check(ShenandoahAsserts::_safe_unknown, obj, is_object_aligned(obj),
          "oop must be aligned");

    ShenandoahHeapRegion *obj_reg = _heap->heap_region_containing(obj);
    Klass* obj_klass = ShenandoahForwarding::klass(obj);

    // Verify that obj is not in dead space:
    {
      // Do this before touching obj->size()
      check(ShenandoahAsserts::_safe_unknown, obj, obj_klass != nullptr,
            "Object klass pointer should not be null");
      check(ShenandoahAsserts::_safe_unknown, obj, Metaspace::contains(obj_klass),
            "Object klass pointer must go to metaspace");

      HeapWord *obj_addr = cast_from_oop<HeapWord*>(obj);
      check(ShenandoahAsserts::_safe_unknown, obj, obj_addr < obj_reg->top(),
            "Object start should be within the region");

      if (!obj_reg->is_humongous()) {
        check(ShenandoahAsserts::_safe_unknown, obj, (obj_addr + ShenandoahForwarding::size(obj)) <= obj_reg->top(),
              "Object end should be within the region");
      } else {
        size_t humongous_start = obj_reg->index();
        size_t humongous_end = humongous_start + (ShenandoahForwarding::size(obj) >> ShenandoahHeapRegion::region_size_words_shift());
        for (size_t idx = humongous_start + 1; idx < humongous_end; idx++) {
          check(ShenandoahAsserts::_safe_unknown, obj, _heap->get_region(idx)->is_humongous_continuation(),
                "Humongous object is in continuation that fits it");
        }
      }

      // ------------ obj is safe at this point --------------

      check(ShenandoahAsserts::_safe_oop, obj, obj_reg->is_active(),
            "Object should be in active region");

      switch (_options._verify_liveness) {
        case ShenandoahVerifier::_verify_liveness_disable:
          // skip
          break;
        case ShenandoahVerifier::_verify_liveness_complete:
          Atomic::add(&_ld[obj_reg->index()], (uint) ShenandoahForwarding::size(obj), memory_order_relaxed);
          // fallthrough for fast failure for un-live regions:
        case ShenandoahVerifier::_verify_liveness_conservative:
          check(ShenandoahAsserts::_safe_oop, obj, obj_reg->has_live() ||
                (obj_reg->is_old() && _heap->gc_generation()->is_young()),
                "Object must belong to region with live data");
          shenandoah_assert_generations_reconciled();
          break;
        default:
          assert(false, "Unhandled liveness verification");
      }
    }

    oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);

    ShenandoahHeapRegion* fwd_reg = nullptr;

    if (obj != fwd) {
      check(ShenandoahAsserts::_safe_oop, obj, _heap->is_in_reserved(fwd),
            "Forwardee must be in heap bounds");
      check(ShenandoahAsserts::_safe_oop, obj, !CompressedOops::is_null(fwd),
            "Forwardee is set");
      check(ShenandoahAsserts::_safe_oop, obj, is_object_aligned(fwd),
            "Forwardee must be aligned");

      // Do this before touching fwd->size()
      Klass* fwd_klass = fwd->klass_or_null();
      check(ShenandoahAsserts::_safe_oop, obj, fwd_klass != nullptr,
            "Forwardee klass pointer should not be null");
      check(ShenandoahAsserts::_safe_oop, obj, Metaspace::contains(fwd_klass),
            "Forwardee klass pointer must go to metaspace");
      check(ShenandoahAsserts::_safe_oop, obj, obj_klass == fwd_klass,
            "Forwardee klass pointer must match object klass pointer");

      fwd_reg = _heap->heap_region_containing(fwd);

      check(ShenandoahAsserts::_safe_oop, obj, fwd_reg->is_active(),
            "Forwardee should be in active region");

      // Verify that forwardee is not in the dead space:
      check(ShenandoahAsserts::_safe_oop, obj, !fwd_reg->is_humongous(),
            "Should have no humongous forwardees");

      HeapWord *fwd_addr = cast_from_oop<HeapWord *>(fwd);
      check(ShenandoahAsserts::_safe_oop, obj, fwd_addr < fwd_reg->top(),
            "Forwardee start should be within the region");
      check(ShenandoahAsserts::_safe_oop, obj, (fwd_addr + ShenandoahForwarding::size(fwd)) <= fwd_reg->top(),
            "Forwardee end should be within the region");

      oop fwd2 = ShenandoahForwarding::get_forwardee_raw_unchecked(fwd);
      check(ShenandoahAsserts::_safe_oop, obj, (fwd == fwd2),
            "Double forwarding");
    } else {
      fwd_reg = obj_reg;
    }

    // Do additional checks for special objects: their fields can hold metadata as well.
    // We want to check that class loading/unloading did not corrupt them.

    if (obj_klass == vmClasses::Class_klass()) {
      Metadata* klass = obj->metadata_field(java_lang_Class::klass_offset());
      check(ShenandoahAsserts::_safe_oop, obj,
            klass == nullptr || Metaspace::contains(klass),
            "Instance class mirror should point to Metaspace");

      Metadata* array_klass = obj->metadata_field(java_lang_Class::array_klass_offset());
      check(ShenandoahAsserts::_safe_oop, obj,
            array_klass == nullptr || Metaspace::contains(array_klass),
            "Array class mirror should point to Metaspace");
    }

    // ------------ obj and fwd are safe at this point --------------
    switch (_options._verify_marked) {
      case ShenandoahVerifier::_verify_marked_disable:
        // skip
        break;
      case ShenandoahVerifier::_verify_marked_incomplete:
        check(ShenandoahAsserts::_safe_all, obj, _heap->marking_context()->is_marked(obj),
              "Must be marked in incomplete bitmap");
        break;
      case ShenandoahVerifier::_verify_marked_complete:
        check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked(obj),
              "Must be marked in complete bitmap");
        break;
      case ShenandoahVerifier::_verify_marked_complete_except_references:
      case ShenandoahVerifier::_verify_marked_complete_satb_empty:
        check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked(obj),
              "Must be marked in complete bitmap, except j.l.r.Reference referents");
        break;
      default:
        assert(false, "Unhandled mark verification");
    }

    switch (_options._verify_forwarded) {
      case ShenandoahVerifier::_verify_forwarded_disable:
        // skip
        break;
      case ShenandoahVerifier::_verify_forwarded_none: {
        check(ShenandoahAsserts::_safe_all, obj, (obj == fwd),
              "Should not be forwarded");
        break;
      }
      case ShenandoahVerifier::_verify_forwarded_allow: {
        if (obj != fwd) {
          check(ShenandoahAsserts::_safe_all, obj, obj_reg != fwd_reg,
                "Forwardee should be in another region");
        }
        break;
      }
      default:
        assert(false, "Unhandled forwarding verification");
    }

    switch (_options._verify_cset) {
      case ShenandoahVerifier::_verify_cset_disable:
        // skip
        break;
      case ShenandoahVerifier::_verify_cset_none:
        check(ShenandoahAsserts::_safe_all, obj, !_heap->in_collection_set(obj),
              "Should not have references to collection set");
        break;
      case ShenandoahVerifier::_verify_cset_forwarded:
        if (_heap->in_collection_set(obj)) {
          check(ShenandoahAsserts::_safe_all, obj, (obj != fwd),
                "Object in collection set, should have forwardee");
        }
        break;
      default:
        assert(false, "Unhandled cset verification");
    }

  }

public:
  /**
   * Verify object with known interior reference.
   * @param p interior reference where the object is referenced from; can be off-heap
   * @param obj verified object
   */
  template <class T>
  void verify_oop_at(T* p, oop obj) {
    _interior_loc = p;
    verify_oop(obj);
    _interior_loc = nullptr;
  }

  /**
   * Verify object without known interior reference.
   * Useful when picking up the object at known offset in heap,
   * but without knowing what objects reference it.
   * @param obj verified object
   */
  void verify_oop_standalone(oop obj) {
    _interior_loc = nullptr;
    verify_oop(obj);
    _interior_loc = nullptr;
  }

  /**
   * Verify oop fields from this object.
   * @param obj host object for verified fields
   */
  void verify_oops_from(oop obj) {
    _loc = obj;
    // oop_iterate() cannot deal with forwarded objects, because
    // it needs to load klass(), which may be overridden by the
    // forwarding pointer.
    oop fwd = ShenandoahForwarding::get_forwardee_raw(obj);
    fwd->oop_iterate(this);
    _loc = nullptr;
  }

  void do_oop(oop* p) override { do_oop_work(p); }
  void do_oop(narrowOop* p) override { do_oop_work(p); }
};

// This closure computes the amounts of used, committed, and garbage memory and the number of regions contained within
// a subset (e.g. the young generation or old generation) of the total heap.
class ShenandoahCalculateRegionStatsClosure : public ShenandoahHeapRegionClosure {
private:
  size_t _used, _committed, _garbage, _regions, _humongous_waste, _trashed_regions;
public:
  ShenandoahCalculateRegionStatsClosure() :
      _used(0), _committed(0), _garbage(0), _regions(0), _humongous_waste(0), _trashed_regions(0) {};

  void heap_region_do(ShenandoahHeapRegion* r) override {
    _used += r->used();
    _garbage += r->garbage();
    _committed += r->is_committed() ? ShenandoahHeapRegion::region_size_bytes() : 0;
    if (r->is_humongous()) {
      _humongous_waste += r->free();
    }
    if (r->is_trash()) {
      _trashed_regions++;
    }
    _regions++;
    log_debug(gc)("ShenandoahCalculateRegionStatsClosure: adding " SIZE_FORMAT " for %s Region " SIZE_FORMAT ", yielding: " SIZE_FORMAT,
            r->used(), (r->is_humongous() ? "humongous" : "regular"), r->index(), _used);
  }

  size_t used() const { return _used; }
  size_t committed() const { return _committed; }
  size_t garbage() const { return _garbage; }
  size_t regions() const { return _regions; }
  size_t waste() const { return _humongous_waste; }

  // span is the total memory affiliated with these stats (some of which is in use and the rest is available)
  size_t span() const { return _regions * ShenandoahHeapRegion::region_size_bytes(); }
  size_t non_trashed_span() const { return (_regions - _trashed_regions) * ShenandoahHeapRegion::region_size_bytes(); }
};

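// Aggregates region statistics per generation: each region is dispatched, based on its affiliation,
// to the young or old accumulator, and also to the global accumulator.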
class ShenandoahGenerationStatsClosure : public ShenandoahHeapRegionClosure {
 public:
  ShenandoahCalculateRegionStatsClosure old;
  ShenandoahCalculateRegionStatsClosure young;
  ShenandoahCalculateRegionStatsClosure global;

  void heap_region_do(ShenandoahHeapRegion* r) override {
    switch (r->affiliation()) {
      case FREE:
        return;
      case YOUNG_GENERATION:
        young.heap_region_do(r);
        global.heap_region_do(r);
        break;
      case OLD_GENERATION:
        old.heap_region_do(r);
        global.heap_region_do(r);
        break;
      default:
        ShouldNotReachHere();
    }
  }

  static void log_usage(ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
    log_debug(gc)("Safepoint verification: %s verified usage: " SIZE_FORMAT "%s, recorded usage: " SIZE_FORMAT "%s",
                  generation->name(),
                  byte_size_in_proper_unit(generation->used()), proper_unit_for_byte_size(generation->used()),
                  byte_size_in_proper_unit(stats.used()),       proper_unit_for_byte_size(stats.used()));
  }

  static void validate_usage(const bool adjust_for_padding,
                             const char* label, ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
    ShenandoahHeap* heap = ShenandoahHeap::heap();
    size_t generation_used = generation->used();
    size_t generation_used_regions = generation->used_regions();
    if (adjust_for_padding && (generation->is_young() || generation->is_global())) {
      size_t pad = heap->old_generation()->get_pad_for_promote_in_place();
      generation_used += pad;
    }

    guarantee(stats.used() == generation_used,
              "%s: generation (%s) used size must be consistent: generation-used: " PROPERFMT ", regions-used: " PROPERFMT,
              label, generation->name(), PROPERFMTARGS(generation_used), PROPERFMTARGS(stats.used()));

    guarantee(stats.regions() == generation_used_regions,
              "%s: generation (%s) used regions (" SIZE_FORMAT ") must equal regions that are in use (" SIZE_FORMAT ")",
              label, generation->name(), generation->used_regions(), stats.regions());

    size_t generation_capacity = generation->max_capacity();
    guarantee(stats.non_trashed_span() <= generation_capacity,
              "%s: generation (%s) size spanned by regions (" SIZE_FORMAT ") * region size (" PROPERFMT
              ") must not exceed current capacity (" PROPERFMT ")",
              label, generation->name(), stats.regions(), PROPERFMTARGS(ShenandoahHeapRegion::region_size_bytes()),
              PROPERFMTARGS(generation_capacity));

    size_t humongous_waste = generation->get_humongous_waste();
    guarantee(stats.waste() == humongous_waste,
              "%s: generation (%s) humongous waste must be consistent: generation: " PROPERFMT ", regions: " PROPERFMT,
              label, generation->name(), PROPERFMTARGS(humongous_waste), PROPERFMTARGS(stats.waste()));
  }
};

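// Verifies per-region invariants: the region state requested via VerifyRegions, the ordering of
// bottom, TAMS and top, and that liveness, garbage and allocation counters never exceed capacity.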
class ShenandoahVerifyHeapRegionClosure : public ShenandoahHeapRegionClosure {
private:
  ShenandoahHeap* _heap;
  const char* _phase;
  ShenandoahVerifier::VerifyRegions _regions;
public:
  ShenandoahVerifyHeapRegionClosure(const char* phase, ShenandoahVerifier::VerifyRegions regions) :
    _heap(ShenandoahHeap::heap()),
    _phase(phase),
    _regions(regions) {};

  void print_failure(ShenandoahHeapRegion* r, const char* label) {
    ResourceMark rm;

    ShenandoahMessageBuffer msg("Shenandoah verification failed; %s: %s\n\n", _phase, label);

    stringStream ss;
    r->print_on(&ss);
    msg.append("%s", ss.as_string());

    report_vm_error(__FILE__, __LINE__, msg.buffer());
  }

  void verify(ShenandoahHeapRegion* r, bool test, const char* msg) {
    if (!test) {
      print_failure(r, msg);
    }
  }

  void heap_region_do(ShenandoahHeapRegion* r) override {
    switch (_regions) {
      case ShenandoahVerifier::_verify_regions_disable:
        break;
      case ShenandoahVerifier::_verify_regions_notrash:
        verify(r, !r->is_trash(),
               "Should not have trash regions");
        break;
      case ShenandoahVerifier::_verify_regions_nocset:
        verify(r, !r->is_cset(),
               "Should not have cset regions");
        break;
      case ShenandoahVerifier::_verify_regions_notrash_nocset:
        verify(r, !r->is_trash(),
               "Should not have trash regions");
        verify(r, !r->is_cset(),
               "Should not have cset regions");
        break;
      default:
        ShouldNotReachHere();
    }

    verify(r, r->capacity() == ShenandoahHeapRegion::region_size_bytes(),
           "Capacity should match region size");

    verify(r, r->bottom() <= r->top(),
           "Region top should not be less than bottom");

    verify(r, r->bottom() <= _heap->marking_context()->top_at_mark_start(r),
           "Region TAMS should not be less than bottom");

    verify(r, _heap->marking_context()->top_at_mark_start(r) <= r->top(),
           "Complete TAMS should not be larger than top");

    verify(r, r->get_live_data_bytes() <= r->capacity(),
           "Live data cannot be larger than capacity");

    verify(r, r->garbage() <= r->capacity(),
           "Garbage cannot be larger than capacity");

    verify(r, r->used() <= r->capacity(),
           "Used cannot be larger than capacity");

    verify(r, r->get_shared_allocs() <= r->capacity(),
           "Shared alloc count should not be larger than capacity");

    verify(r, r->get_tlab_allocs() <= r->capacity(),
           "TLAB alloc count should not be larger than capacity");

    verify(r, r->get_gclab_allocs() <= r->capacity(),
           "GCLAB alloc count should not be larger than capacity");

    verify(r, r->get_plab_allocs() <= r->capacity(),
           "PLAB alloc count should not be larger than capacity");

    verify(r, r->get_shared_allocs() + r->get_tlab_allocs() + r->get_gclab_allocs() + r->get_plab_allocs() == r->used(),
           "Accurate accounting: shared + TLAB + GCLAB + PLAB = used");

    verify(r, !r->is_empty() || !r->has_live(),
           "Empty regions should not have live data");

    verify(r, r->is_cset() == _heap->collection_set()->is_in(r),
           "Transitional: region flags and collection set agree");
  }
};

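// Walks the heap reachable from the roots. At verification level 2 only the roots are checked
// (by a single worker); at level 3 and above each worker also scans the roots and drains its
// local stack to verify everything reachable from them.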
class ShenandoahVerifierReachableTask : public WorkerTask {
private:
  const char* _label;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahHeap* _heap;
  ShenandoahLivenessData* _ld;
  MarkBitMap* _bitmap;
  volatile size_t _processed;

public:
  ShenandoahVerifierReachableTask(MarkBitMap* bitmap,
                                  ShenandoahLivenessData* ld,
                                  const char* label,
                                  ShenandoahVerifier::VerifyOptions options) :
    WorkerTask("Shenandoah Verifier Reachable Objects"),
    _label(label),
    _options(options),
    _heap(ShenandoahHeap::heap()),
    _ld(ld),
    _bitmap(bitmap),
    _processed(0) {};

  size_t processed() const {
    return _processed;
  }

  void work(uint worker_id) override {
    ResourceMark rm;
    ShenandoahVerifierStack stack;

    // On level 2, we only need to check the roots once.
    // On level 3, we want to check the roots, and seed the local stack.
    // It is a lesser evil to accept multiple root scans at level 3, because
    // extended parallelism would buy us out.
    if (((ShenandoahVerifyLevel == 2) && (worker_id == 0))
        || (ShenandoahVerifyLevel >= 3)) {
        ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
                                      ShenandoahMessageBuffer("%s, Roots", _label),
                                      _options);
        if (_heap->unload_classes()) {
          ShenandoahRootVerifier::strong_roots_do(&cl);
        } else {
          ShenandoahRootVerifier::roots_do(&cl);
        }
    }

    size_t processed = 0;

    if (ShenandoahVerifyLevel >= 3) {
      ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
                                    ShenandoahMessageBuffer("%s, Reachable", _label),
                                    _options);
      while (!stack.is_empty()) {
        processed++;
        ShenandoahVerifierTask task = stack.pop();
        cl.verify_oops_from(task.obj());
      }
    }

    Atomic::add(&_processed, processed, memory_order_relaxed);
  }
};

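// Checks that every thread-local SATB buffer has been flushed; used when marking is expected to
// have completed with empty SATB queues.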
class ShenandoahVerifyNoIncompleteSatbBuffers : public ThreadClosure {
public:
  void do_thread(Thread* thread) override {
    SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
    if (!queue.is_empty()) {
      fatal("All SATB buffers should have been flushed during mark");
    }
  }
};

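// Walks all marked objects, region by region. Workers claim regions atomically; regular regions
// are scanned via the mark bitmap below TAMS and by object size above TAMS, humongous objects
// via their start region.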
class ShenandoahVerifierMarkedRegionTask : public WorkerTask {
private:
  const char* _label;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahHeap *_heap;
  MarkBitMap* _bitmap;
  ShenandoahLivenessData* _ld;
  volatile size_t _claimed;
  volatile size_t _processed;
  ShenandoahGeneration* _generation;

public:
  ShenandoahVerifierMarkedRegionTask(MarkBitMap* bitmap,
                                     ShenandoahLivenessData* ld,
                                     const char* label,
                                     ShenandoahVerifier::VerifyOptions options) :
          WorkerTask("Shenandoah Verifier Marked Objects"),
          _label(label),
          _options(options),
          _heap(ShenandoahHeap::heap()),
          _bitmap(bitmap),
          _ld(ld),
          _claimed(0),
          _processed(0),
          _generation(nullptr) {
    if (_heap->mode()->is_generational()) {
      _generation = _heap->gc_generation();
      assert(_generation != nullptr, "Expected active generation in this mode.");
      shenandoah_assert_generations_reconciled();
    }
  };

  size_t processed() {
    return Atomic::load(&_processed);
  }

  void work(uint worker_id) override {
    if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
      ShenandoahVerifyNoIncompleteSatbBuffers verify_satb;
      Threads::threads_do(&verify_satb);
    }

    ShenandoahVerifierStack stack;
    ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
                                  ShenandoahMessageBuffer("%s, Marked", _label),
                                  _options);

    while (true) {
      size_t v = Atomic::fetch_then_add(&_claimed, 1u, memory_order_relaxed);
      if (v < _heap->num_regions()) {
        ShenandoahHeapRegion* r = _heap->get_region(v);
        if (!in_generation(r)) {
          continue;
        }

        if (!r->is_humongous() && !r->is_trash()) {
          work_regular(r, stack, cl);
        } else if (r->is_humongous_start()) {
          work_humongous(r, stack, cl);
        }
      } else {
        break;
      }
    }
  }

  bool in_generation(ShenandoahHeapRegion* r) {
    return _generation == nullptr || _generation->contains(r);
  }

  virtual void work_humongous(ShenandoahHeapRegion *r, ShenandoahVerifierStack& stack, ShenandoahVerifyOopClosure& cl) {
    size_t processed = 0;
    HeapWord* obj = r->bottom();
    if (_heap->complete_marking_context()->is_marked(cast_to_oop(obj))) {
      verify_and_follow(obj, stack, cl, &processed);
    }
    Atomic::add(&_processed, processed, memory_order_relaxed);
  }

  virtual void work_regular(ShenandoahHeapRegion *r, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl) {
    size_t processed = 0;
    ShenandoahMarkingContext* ctx = _heap->complete_marking_context();
    HeapWord* tams = ctx->top_at_mark_start(r);

    // Bitmaps, before TAMS
    if (tams > r->bottom()) {
      HeapWord* start = r->bottom();
      HeapWord* addr = ctx->get_next_marked_addr(start, tams);

      while (addr < tams) {
        verify_and_follow(addr, stack, cl, &processed);
        addr += 1;
        if (addr < tams) {
          addr = ctx->get_next_marked_addr(addr, tams);
        }
      }
    }

    // Size-based, after TAMS
    {
      HeapWord* limit = r->top();
      HeapWord* addr = tams;

      while (addr < limit) {
        verify_and_follow(addr, stack, cl, &processed);
        addr += ShenandoahForwarding::size(cast_to_oop(addr));
      }
    }

    Atomic::add(&_processed, processed, memory_order_relaxed);
  }

  void verify_and_follow(HeapWord *addr, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl, size_t *processed) {
    if (!_bitmap->par_mark(addr)) return;

    // Verify the object itself:
    oop obj = cast_to_oop(addr);
    cl.verify_oop_standalone(obj);

    // Verify everything reachable from that object too, hopefully realizing
    // everything was already marked, and never touching further:
    if (!is_instance_ref_klass(ShenandoahForwarding::klass(obj))) {
      cl.verify_oops_from(obj);
      (*processed)++;
    }
    while (!stack.is_empty()) {
      ShenandoahVerifierTask task = stack.pop();
      cl.verify_oops_from(task.obj());
      (*processed)++;
    }
  }
};

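// Checks that the thread-local gc-state of every Java thread matches the globally expected state
// for the current verification phase.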
class VerifyThreadGCState : public ThreadClosure {
private:
  const char* const _label;
         char const _expected;

public:
  VerifyThreadGCState(const char* label, char expected) : _label(label), _expected(expected) {}
  void do_thread(Thread* t) override {
    char actual = ShenandoahThreadLocalData::gc_state(t);
    if (!verify_gc_state(actual, _expected)) {
      fatal("%s: Thread %s: expected gc-state %d, actual %d", _label, t->name(), _expected, actual);
    }
  }

  static bool verify_gc_state(char actual, char expected) {
    // Old generation marking is allowed in all states.
    if (ShenandoahHeap::heap()->mode()->is_generational()) {
      return ((actual & ~(ShenandoahHeap::OLD_MARKING | ShenandoahHeap::MARKING)) == expected);
    } else {
      assert((actual & ShenandoahHeap::OLD_MARKING) == 0, "Should not mark old in non-generational mode");
      return (actual == expected);
    }
  }
};

void ShenandoahVerifier::verify_at_safepoint(const char* label,
                                             VerifyRememberedSet remembered,
                                             VerifyForwarded forwarded, VerifyMarked marked,
                                             VerifyCollectionSet cset,
                                             VerifyLiveness liveness, VerifyRegions regions,
                                             VerifySize sizeness,
                                             VerifyGCState gcstate) {
  guarantee(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "only when nothing else happens");
  guarantee(ShenandoahVerify, "only when enabled, and bitmap is initialized in ShenandoahHeap::initialize");

  ShenandoahHeap::heap()->propagate_gc_state_to_java_threads();

  // Avoid side-effect of changing workers' active thread count, but bypass concurrent/parallel protocol check
  ShenandoahPushWorkerScope verify_worker_scope(_heap->workers(), _heap->max_workers(), false /*bypass check*/);

  log_info(gc,start)("Verify %s, Level " INTX_FORMAT, label, ShenandoahVerifyLevel);

  // GC state checks
  {
    char expected = -1;
    bool enabled;
    switch (gcstate) {
      case _verify_gcstate_disable:
        enabled = false;
        break;
      case _verify_gcstate_forwarded:
        enabled = true;
        expected = ShenandoahHeap::HAS_FORWARDED;
        break;
      case _verify_gcstate_updating:
        enabled = true;
        expected = ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::UPDATEREFS;
        break;
      case _verify_gcstate_stable:
        enabled = true;
        expected = ShenandoahHeap::STABLE;
        break;
      case _verify_gcstate_stable_weakroots:
        enabled = true;
        expected = ShenandoahHeap::STABLE;
        if (!_heap->is_stw_gc_in_progress()) {
          // Only concurrent GC sets this.
          expected |= ShenandoahHeap::WEAK_ROOTS;
        }
        break;
      default:
        enabled = false;
        assert(false, "Unhandled gc-state verification");
    }

    if (enabled) {
      char actual = _heap->gc_state();

      bool is_marking = (actual & ShenandoahHeap::MARKING);
      bool is_marking_young_or_old = (actual & (ShenandoahHeap::YOUNG_MARKING | ShenandoahHeap::OLD_MARKING));
      assert(is_marking == is_marking_young_or_old, "MARKING iff (YOUNG_MARKING or OLD_MARKING), gc_state is: %x", actual);

      // Old generation marking is allowed in all states.
      if (!VerifyThreadGCState::verify_gc_state(actual, expected)) {
        fatal("%s: Global gc-state: expected %d, actual %d", label, expected, actual);
      }

      VerifyThreadGCState vtgcs(label, expected);
      Threads::java_threads_do(&vtgcs);
    }
  }

  // Deactivate barriers temporarily: Verifier wants plain heap accesses
  ShenandoahGCStateResetter resetter;

  // Heap size checks
  {
    ShenandoahHeapLocker lock(_heap->lock());

    ShenandoahCalculateRegionStatsClosure cl;
    _heap->heap_region_iterate(&cl);
    size_t heap_used;
    if (_heap->mode()->is_generational() && (sizeness == _verify_size_adjusted_for_padding)) {
      // Prior to evacuation, regular regions that are to be evacuated in place are padded to prevent further allocations
      heap_used = _heap->used() + _heap->old_generation()->get_pad_for_promote_in_place();
    } else if (sizeness != _verify_size_disable) {
      heap_used = _heap->used();
    }
    if (sizeness != _verify_size_disable) {
      guarantee(cl.used() == heap_used,
                "%s: heap used size must be consistent: heap-used = " SIZE_FORMAT "%s, regions-used = " SIZE_FORMAT "%s",
                label,
                byte_size_in_proper_unit(heap_used), proper_unit_for_byte_size(heap_used),
                byte_size_in_proper_unit(cl.used()), proper_unit_for_byte_size(cl.used()));
    }
    size_t heap_committed = _heap->committed();
    guarantee(cl.committed() == heap_committed,
              "%s: heap committed size must be consistent: heap-committed = " SIZE_FORMAT "%s, regions-committed = " SIZE_FORMAT "%s",
              label,
              byte_size_in_proper_unit(heap_committed), proper_unit_for_byte_size(heap_committed),
              byte_size_in_proper_unit(cl.committed()), proper_unit_for_byte_size(cl.committed()));
  }

  log_debug(gc)("Safepoint verification finished heap usage verification");

  ShenandoahGeneration* generation;
  if (_heap->mode()->is_generational()) {
    generation = _heap->gc_generation();
    guarantee(generation != nullptr, "Need to know which generation to verify.");
    shenandoah_assert_generations_reconciled();
  } else {
    generation = nullptr;
  }

  if (generation != nullptr) {
    ShenandoahHeapLocker lock(_heap->lock());

    switch (remembered) {
      case _verify_remembered_disable:
        break;
      case _verify_remembered_before_marking:
        log_debug(gc)("Safepoint verification of remembered set at mark");
        verify_rem_set_before_mark();
        break;
      case _verify_remembered_before_updating_references:
        log_debug(gc)("Safepoint verification of remembered set at update ref");
        verify_rem_set_before_update_ref();
        break;
      case _verify_remembered_after_full_gc:
        log_debug(gc)("Safepoint verification of remembered set after full gc");
        verify_rem_set_after_full_gc();
        break;
      default:
        fatal("Unhandled remembered set verification mode");
    }

    ShenandoahGenerationStatsClosure cl;
    _heap->heap_region_iterate(&cl);

    if (LogTarget(Debug, gc)::is_enabled()) {
      ShenandoahGenerationStatsClosure::log_usage(_heap->old_generation(),    cl.old);
      ShenandoahGenerationStatsClosure::log_usage(_heap->young_generation(),  cl.young);
      ShenandoahGenerationStatsClosure::log_usage(_heap->global_generation(), cl.global);
    }
    if (sizeness == _verify_size_adjusted_for_padding) {
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
      ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->young_generation(), cl.young);
      ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->global_generation(), cl.global);
    } else if (sizeness == _verify_size_exact) {
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->young_generation(), cl.young);
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->global_generation(), cl.global);
    }
    // else: sizeness must equal _verify_size_disable
  }

  log_debug(gc)("Safepoint verification finished remembered set verification");

  // Internal heap region checks
  if (ShenandoahVerifyLevel >= 1) {
    ShenandoahVerifyHeapRegionClosure cl(label, regions);
    if (generation != nullptr) {
      generation->heap_region_iterate(&cl);
    } else {
      _heap->heap_region_iterate(&cl);
    }
  }

  log_debug(gc)("Safepoint verification finished heap region closure verification");

  OrderAccess::fence();

  if (UseTLAB) {
    _heap->labs_make_parsable();
  }

  // Allocate temporary bitmap for storing marking wavefront:
  _verification_bit_map->clear();

  // Allocate temporary array for storing liveness data
  ShenandoahLivenessData* ld = NEW_C_HEAP_ARRAY(ShenandoahLivenessData, _heap->num_regions(), mtGC);
  Copy::fill_to_bytes((void*)ld, _heap->num_regions()*sizeof(ShenandoahLivenessData), 0);

  const VerifyOptions& options = ShenandoahVerifier::VerifyOptions(forwarded, marked, cset, liveness, regions, gcstate);

  // Steps 1-2. Scan root set to get initial reachable set. Finish walking the reachable heap.
  // This verifies what the application can see, since it only cares about reachable objects.
  size_t count_reachable = 0;
  if (ShenandoahVerifyLevel >= 2) {
    ShenandoahVerifierReachableTask task(_verification_bit_map, ld, label, options);
    _heap->workers()->run_task(&task);
    count_reachable = task.processed();
  }

  log_debug(gc)("Safepoint verification finished getting initial reachable set");

  // Step 3. Walk marked objects. Marked objects might be unreachable. This verifies what the collector,
  // not the application, can see during the region scans. There is no reason to process the objects
  // that were already verified, e.g. those marked in verification bitmap. There is interaction with TAMS:
  // before TAMS, we verify the bitmaps, if available; after TAMS, we walk until the top(). It mimics
  // what marked_object_iterate is doing, without calling into that optimized (and possibly incorrect)
  // version.

  size_t count_marked = 0;
  if (ShenandoahVerifyLevel >= 4 &&
        (marked == _verify_marked_complete ||
         marked == _verify_marked_complete_except_references ||
         marked == _verify_marked_complete_satb_empty)) {
    guarantee(_heap->marking_context()->is_complete(), "Marking context should be complete");
    ShenandoahVerifierMarkedRegionTask task(_verification_bit_map, ld, label, options);
    _heap->workers()->run_task(&task);
    count_marked = task.processed();
  } else {
    guarantee(ShenandoahVerifyLevel < 4 || marked == _verify_marked_incomplete || marked == _verify_marked_disable, "Should be");
  }

  log_debug(gc)("Safepoint verification finished walking marked objects");

  // Step 4. Verify accumulated liveness data, if needed. Only reliable if verification level includes
  // marked objects.

  if (ShenandoahVerifyLevel >= 4 && marked == _verify_marked_complete && liveness == _verify_liveness_complete) {
    for (size_t i = 0; i < _heap->num_regions(); i++) {
      ShenandoahHeapRegion* r = _heap->get_region(i);
      if (generation != nullptr && !generation->contains(r)) {
        continue;
      }

      juint verf_live = 0;
      if (r->is_humongous()) {
        // For humongous objects, test if start region is marked live, and if so,
        // all humongous regions in that chain have live data equal to their "used".
        juint start_live = Atomic::load(&ld[r->humongous_start_region()->index()]);
        if (start_live > 0) {
          verf_live = (juint)(r->used() / HeapWordSize);
        }
      } else {
        verf_live = Atomic::load(&ld[r->index()]);
      }

      size_t reg_live = r->get_live_data_words();
      if (reg_live != verf_live) {
        stringStream ss;
        r->print_on(&ss);
        fatal("%s: Live data should match: region-live = " SIZE_FORMAT ", verifier-live = " UINT32_FORMAT "\n%s",
              label, reg_live, verf_live, ss.freeze());
      }
    }
  }

  log_debug(gc)("Safepoint verification finished accumulation of liveness data");


  log_info(gc)("Verify %s, Level " INTX_FORMAT " (" SIZE_FORMAT " reachable, " SIZE_FORMAT " marked)",
               label, ShenandoahVerifyLevel, count_reachable, count_marked);

  FREE_C_HEAP_ARRAY(ShenandoahLivenessData, ld);
}

void ShenandoahVerifier::verify_generic(VerifyOption vo) {
  verify_at_safepoint(
          "Generic Verification",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // conservatively allow forwarded
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_disable,        // cset may be inconsistent
          _verify_liveness_disable,    // no reliable liveness data
          _verify_regions_disable,     // no reliable region data
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_disable      // no data about gcstate
  );
}

void ShenandoahVerifier::verify_before_concmark() {
  verify_at_safepoint(
          "Before Mark",
          _verify_remembered_before_marking,
                                       // verify read-only remembered set from bottom() to top()
          _verify_forwarded_none,      // UR should have fixed up
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_none,           // UR should have fixed this
          _verify_liveness_disable,    // no reliable liveness data
          _verify_regions_notrash,     // no trash regions
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // there are no forwarded objects
  );
}

void ShenandoahVerifier::verify_after_concmark() {
  verify_at_safepoint(
          "After Mark",
          _verify_remembered_disable,         // do not verify remembered set
          _verify_forwarded_none,             // no forwarded references
          _verify_marked_complete_satb_empty, // bitmaps as precise as we can get, except dangling j.l.r.Refs
          _verify_cset_none,                  // no references to cset anymore
          _verify_liveness_complete,          // liveness data must be complete here
          _verify_regions_disable,            // trash regions not yet recycled
          _verify_size_exact,                 // expect generation and heap sizes to match exactly
          _verify_gcstate_stable_weakroots    // heap is still stable, weakroots are in progress
  );
}

void ShenandoahVerifier::verify_after_concmark_with_promotions() {
  verify_at_safepoint(
          "After Mark",
          _verify_remembered_disable,         // do not verify remembered set
          _verify_forwarded_none,             // no forwarded references
          _verify_marked_complete_satb_empty, // bitmaps as precise as we can get, except dangling j.l.r.Refs
          _verify_cset_none,                  // no references to cset anymore
          _verify_liveness_complete,          // liveness data must be complete here
          _verify_regions_disable,            // trash regions not yet recycled
          _verify_size_adjusted_for_padding,  // expect generation and heap sizes to match after adjustments
                                              // for promote in place padding
          _verify_gcstate_stable_weakroots    // heap is still stable, weakroots are in progress
  );
}

void ShenandoahVerifier::verify_before_evacuation() {
  verify_at_safepoint(
          "Before Evacuation",
          _verify_remembered_disable,                // do not verify remembered set
          _verify_forwarded_none,                    // no forwarded references
          _verify_marked_complete_except_references, // walk over marked objects too
          _verify_cset_disable,                      // non-forwarded references to cset expected
          _verify_liveness_complete,                 // liveness data must be complete here
          _verify_regions_disable,                   // trash regions not yet recycled
          _verify_size_adjusted_for_padding,         // expect generation and heap sizes to match after adjustments
                                                     //  for promote in place padding
          _verify_gcstate_stable_weakroots           // heap is still stable, weakroots are in progress
  );
}

void ShenandoahVerifier::verify_before_updaterefs() {
  verify_at_safepoint(
          "Before Updating References",
          _verify_remembered_before_updating_references,  // verify read-write remembered set
          _verify_forwarded_allow,     // forwarded references allowed
          _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
          _verify_cset_forwarded,      // all cset refs are fully forwarded
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash,     // trash regions have been recycled already
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_updating     // evacuation should have produced some forwarded objects
  );
}

// We have not yet cleaned up (reclaimed) the collection set
void ShenandoahVerifier::verify_after_updaterefs() {
  verify_at_safepoint(
          "After Updating References",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_none,      // no forwarded references
          _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
          _verify_cset_none,           // no cset references, all updated
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_nocset,      // no cset regions, trash regions have appeared
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // update refs had cleaned up forwarded objects
  );
}

void ShenandoahVerifier::verify_after_degenerated() {
  verify_at_safepoint(
          "After Degenerated GC",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_none,      // all objects are non-forwarded
          _verify_marked_complete,     // all objects are marked in complete bitmap
          _verify_cset_none,           // no cset references
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash_nocset, // no trash, no cset
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // degenerated refs had cleaned up forwarded objects
  );
}

void ShenandoahVerifier::verify_before_fullgc() {
  verify_at_safepoint(
          "Before Full GC",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // can have forwarded objects
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_disable,        // cset might be foobared
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_disable,     // no reliable region data here
          _verify_size_disable,        // if we degenerate during evacuation, usage not valid: padding and deferred accounting
          _verify_gcstate_disable      // no reliable gcstate data
  );
}

void ShenandoahVerifier::verify_after_fullgc() {
  verify_at_safepoint(
          "After Full GC",
          _verify_remembered_after_full_gc,  // verify read-write remembered set
          _verify_forwarded_none,      // all objects are non-forwarded
          _verify_marked_complete,     // all objects are marked in complete bitmap
          _verify_cset_none,           // no cset references
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash_nocset, // no trash, no cset
          _verify_size_exact,           // expect generation and heap sizes to match exactly
          _verify_gcstate_stable        // full gc cleaned up everything
  );
}

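// Root closure that reports a failure for any root that still points to a forwarded object.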
class ShenandoahVerifyNoForwarded : public BasicOopIterateClosure {
private:
  template <class T>
  void do_oop_work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
      if (obj != fwd) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                                         "Verify Roots", "Should not be forwarded", __FILE__, __LINE__);
      }
    }
  }

public:
  void do_oop(narrowOop* p) { do_oop_work(p); }
  void do_oop(oop* p)       { do_oop_work(p); }
};

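// Root closure that checks all roots point into to-space: the referenced object must be marked,
// must not be in the collection set, and must not be forwarded.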
1207 class ShenandoahVerifyInToSpaceClosure : public BasicOopIterateClosure {
1208 private:
1209   template <class T>
1210   void do_oop_work(T* p) {
1211     T o = RawAccess<>::oop_load(p);
1212     if (!CompressedOops::is_null(o)) {
1213       oop obj = CompressedOops::decode_not_null(o);
1214       ShenandoahHeap* heap = ShenandoahHeap::heap();
1215 
1216       if (!heap->marking_context()->is_marked_or_old(obj)) {
1217         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1218                 "Verify Roots In To-Space", "Should be marked", __FILE__, __LINE__);
1219       }
1220 
1221       if (heap->in_collection_set(obj)) {
1222         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1223                 "Verify Roots In To-Space", "Should not be in collection set", __FILE__, __LINE__);
1224       }
1225 
1226       oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
1227       if (obj != fwd) {
1228         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1229                 "Verify Roots In To-Space", "Should not be forwarded", __FILE__, __LINE__);
1230       }
1231     }
1232   }
1233 
1234 public:
1235   void do_oop(narrowOop* p) override { do_oop_work(p); }
1236   void do_oop(oop* p)       override { do_oop_work(p); }
1237 };
1238 
1239 void ShenandoahVerifier::verify_roots_in_to_space() {
1240   ShenandoahVerifyInToSpaceClosure cl;
1241   ShenandoahRootVerifier::roots_do(&cl);
1242 }
1243 
1244 void ShenandoahVerifier::verify_roots_no_forwarded() {
1245   ShenandoahVerifyNoForwarded cl;
1246   ShenandoahRootVerifier::roots_do(&cl);
1247 }
1248 
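     // Reports interesting (old->young) pointers whose covering card is not dirty according to the
     // given scanner. The Scanner parameter selects which card table is consulted: the raw
     // ShenandoahScanRemembered reads the read table, while ShenandoahWriteTableScanner (defined
     // below) redirects is_card_dirty() to the write table.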
1249 template<typename Scanner>
1250 class ShenandoahVerifyRemSetClosure : public BasicOopIterateClosure {
1251 protected:
1252   ShenandoahGenerationalHeap* const _heap;
1253   Scanner*   const _scanner;
1254   const char* _message;
1255 
1256 public:
1257   // The message argument distinguishes between initial-mark and start-of-update-refs verification.
1258   explicit ShenandoahVerifyRemSetClosure(Scanner* scanner, const char* message) :
1259             _heap(ShenandoahGenerationalHeap::heap()),
1260             _scanner(scanner),
1261             _message(message) {}
1262 
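       // A pointer from an old region into the young generation is "interesting": the card spanning
       // the pointer's location must be dirty, otherwise the remembered-set scan would miss it.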
1263   template<class T>
1264   inline void work(T* p) {
1265     T o = RawAccess<>::oop_load(p);
1266     if (!CompressedOops::is_null(o)) {
1267       oop obj = CompressedOops::decode_not_null(o);
1268       if (_heap->is_in_young(obj) && !_scanner->is_card_dirty((HeapWord*) p)) {
1269         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1270                                          _message, "clean card should be dirty", __FILE__, __LINE__);
1271       }
1272     }
1273   }
1274 
1275   void do_oop(narrowOop* p) override { work(p); }
1276   void do_oop(oop* p)       override { work(p); }
1277 };
1278 
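     // Returns the complete marking context when old-generation marking is complete (or a global
     // collection is in progress); otherwise returns nullptr, and callers treat every object in old
     // regions as live.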
1279 ShenandoahMarkingContext* ShenandoahVerifier::get_marking_context_for_old() {
1280   shenandoah_assert_generations_reconciled();
1281   if (_heap->old_generation()->is_mark_complete() || _heap->gc_generation()->is_global()) {
1282     return _heap->complete_marking_context();
1283   }
1284   return nullptr;
1285 }
1286 
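     // Verifies the remembered set for a single old region: every live object below
     // registration_watermark must be properly registered with the card scanner, and interesting
     // pointers within live objects must be covered by dirty cards in the table selected by Scanner.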
1287 template<typename Scanner>
1288 void ShenandoahVerifier::help_verify_region_rem_set(Scanner* scanner, ShenandoahHeapRegion* r, ShenandoahMarkingContext* ctx,
1289                                                     HeapWord* registration_watermark, const char* message) {
1290   ShenandoahVerifyRemSetClosure<Scanner> check_interesting_pointers(scanner, message);
1291   HeapWord* from = r->bottom();
1292   HeapWord* obj_addr = from;
1293   if (r->is_humongous_start()) {
1294     oop obj = cast_to_oop(obj_addr);
1295     if ((ctx == nullptr) || ctx->is_marked(obj)) {
1296       // For humongous objects, the typical object is an array, so the following checks may be overkill
1297       // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1298       // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1299       if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1300         obj->oop_iterate(&check_interesting_pointers);
1301       }
1302       // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1303     }
1304     // else, this humongous object is not live so no need to verify its internal pointers
1305 
1306     if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1307       ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1308                                        "object not properly registered", __FILE__, __LINE__);
1309     }
1310   } else if (!r->is_humongous()) {
1311     HeapWord* top = r->top();
1312     while (obj_addr < top) {
1313       oop obj = cast_to_oop(obj_addr);
1314       // ctx->is_marked() returns true if the mark bit is set or if obj is above TAMS.
1315       if ((ctx == nullptr) || ctx->is_marked(obj)) {
1316         // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1317         // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1318         if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1319           obj->oop_iterate(&check_interesting_pointers);
1320         }
1321         // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1322 
1323         if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1324           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1325                                            "object not properly registered", __FILE__, __LINE__);
1326         }
1327         obj_addr += obj->size();
1328       } else {
1329         // This object is not live so we don't verify dirty cards contained therein
1330         HeapWord* tams = ctx->top_at_mark_start(r);
1331         obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
1332       }
1333     }
1334   }
1335 }
1336 
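     // Adapter that exposes the write card table through the is_card_dirty()/verify_registration()
     // interface expected by help_verify_region_rem_set().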
1337 class ShenandoahWriteTableScanner {
1338 private:
1339   ShenandoahScanRemembered* _scanner;
1340 public:
1341   explicit ShenandoahWriteTableScanner(ShenandoahScanRemembered* scanner) : _scanner(scanner) {}
1342 
1343   bool is_card_dirty(HeapWord* obj_addr) {
1344     return _scanner->is_write_card_dirty(obj_addr);
1345   }
1346 
1347   bool verify_registration(HeapWord* obj_addr, ShenandoahMarkingContext* ctx) {
1348     return _scanner->verify_registration(obj_addr, ctx);
1349   }
1350 };
1351 
1352 // Ensure that the remembered set has a dirty card everywhere there is an interesting pointer.
1353 // This examines the read_card_table between bottom() and top() since all PLABs are retired
1354 // before the safepoint for init_mark.  In fact, PLABs are retired before update-references and are
1355 // not restored until the start of evacuation.
1356 void ShenandoahVerifier::verify_rem_set_before_mark() {
1357   shenandoah_assert_safepoint();
1358   shenandoah_assert_generational();
1359 
1360   ShenandoahMarkingContext* ctx = get_marking_context_for_old();
1361   ShenandoahOldGeneration* old_generation = _heap->old_generation();
1362 
1363   log_debug(gc)("Verifying remembered set at %s mark", old_generation->is_doing_mixed_evacuations() ? "mixed" : "young");
1364 
1365   ShenandoahScanRemembered* scanner = old_generation->card_scan();
1366   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1367     ShenandoahHeapRegion* r = _heap->get_region(i);
1368     if (r->is_old() && r->is_active()) {
1369       help_verify_region_rem_set(scanner, r, ctx, r->end(), "Verify init-mark remembered set violation");
1370     }
1371   }
1372 }
1373 
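     // Verify the remembered set at the end of Full GC: the write card table is checked for all old
     // regions outside the collection set, with no old marking context, so every object below top()
     // is treated as live.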
1374 void ShenandoahVerifier::verify_rem_set_after_full_gc() {
1375   shenandoah_assert_safepoint();
1376   shenandoah_assert_generational();
1377 
1378   ShenandoahWriteTableScanner scanner(ShenandoahGenerationalHeap::heap()->old_generation()->card_scan());
1379   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1380     ShenandoahHeapRegion* r = _heap->get_region(i);
1381     if (r->is_old() && !r->is_cset()) {
1382       help_verify_region_rem_set(&scanner, r, nullptr, r->top(), "Remembered set violation at end of Full GC");
1383     }
1384   }
1385 }
1386 
1387 // Ensure that the remembered set has a dirty card everywhere there is an interesting pointer.  Even though
1388 // the update-references scan of the remembered set only examines cards up to update_watermark, the remembered
1389 // set should be valid through top().  This examines the write_card_table between bottom() and top() because
1390 // all PLABs are retired immediately before the start of update refs.
1391 void ShenandoahVerifier::verify_rem_set_before_update_ref() {
1392   shenandoah_assert_safepoint();
1393   shenandoah_assert_generational();
1394 
1395   ShenandoahMarkingContext* ctx = get_marking_context_for_old();
1396   ShenandoahWriteTableScanner scanner(_heap->old_generation()->card_scan());
1397   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1398     ShenandoahHeapRegion* r = _heap->get_region(i);
1399     if (r->is_old() && !r->is_cset()) {
1400       help_verify_region_rem_set(&scanner, r, ctx, r->get_update_watermark(), "Remembered set violation at init-update-references");
1401     }
1402   }
1403 }
1404 
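     // Before the free set is rebuilt, walk all regions and cross-check the tallied usage against the
     // accounting maintained by the old, young, and global generations.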
1405 void ShenandoahVerifier::verify_before_rebuilding_free_set() {
1406   ShenandoahGenerationStatsClosure cl;
1407   _heap->heap_region_iterate(&cl);
1408 
1409   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->old_generation(), cl.old);
1410   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->young_generation(), cl.young);
1411   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->global_generation(), cl.global);
1412 }