1 /*
   2  * Copyright (c) 2017, 2021, Red Hat, Inc. All rights reserved.
   3  * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
   4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   5  *
   6  * This code is free software; you can redistribute it and/or modify it
   7  * under the terms of the GNU General Public License version 2 only, as
   8  * published by the Free Software Foundation.
   9  *
  10  * This code is distributed in the hope that it will be useful, but WITHOUT
  11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  13  * version 2 for more details (a copy is included in the LICENSE file that
  14  * accompanied this code).
  15  *
  16  * You should have received a copy of the GNU General Public License version
  17  * 2 along with this work; if not, write to the Free Software Foundation,
  18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  19  *
  20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  21  * or visit www.oracle.com if you need additional information or have any
  22  * questions.
  23  *
  24  */
  25 
  26 #include "precompiled.hpp"
  27 #include "gc/shared/tlab_globals.hpp"
  28 #include "gc/shenandoah/shenandoahAsserts.hpp"
  29 #include "gc/shenandoah/shenandoahForwarding.inline.hpp"
  30 #include "gc/shenandoah/shenandoahPhaseTimings.hpp"
  31 #include "gc/shenandoah/shenandoahGeneration.hpp"
  32 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
  33 #include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
  34 #include "gc/shenandoah/shenandoahOldGeneration.hpp"
  35 #include "gc/shenandoah/shenandoahRootProcessor.hpp"
  36 #include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
  37 #include "gc/shenandoah/shenandoahTaskqueue.inline.hpp"
  38 #include "gc/shenandoah/shenandoahUtils.hpp"
  39 #include "gc/shenandoah/shenandoahVerifier.hpp"
  40 #include "gc/shenandoah/shenandoahYoungGeneration.hpp"
  41 #include "memory/allocation.hpp"
  42 #include "memory/iterator.inline.hpp"
  43 #include "memory/resourceArea.hpp"
  44 #include "oops/compressedOops.inline.hpp"
  45 #include "runtime/atomic.hpp"
  46 #include "runtime/orderAccess.hpp"
  47 #include "runtime/threads.hpp"
  48 #include "utilities/align.hpp"
  49 
  50 // Avoid name collision on verify_oop (defined in macroAssembler_arm.hpp)
  51 #ifdef verify_oop
  52 #undef verify_oop
  53 #endif
  54 
  55 static bool is_instance_ref_klass(Klass* k) {
  56   return k->is_instance_klass() && InstanceKlass::cast(k)->reference_type() != REF_NONE;
  57 }
  58 
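      // A ReferenceDiscoverer that claims every j.l.r.Reference as already discovered.
      // Installing it on the verify closure makes the oop iteration skip referent fields,
      // which may be legitimately unmarked (see the _verify_marked_* options below).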
  59 class ShenandoahIgnoreReferenceDiscoverer : public ReferenceDiscoverer {
  60 public:
  61   virtual bool discover_reference(oop obj, ReferenceType type) {
  62     return true;
  63   }
  64 };
  65 
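      // Verifies a single object, and everything reachable from it, against the requested
      // VerifyOptions. Newly marked objects are pushed on the verifier stack so the caller
      // can continue the traversal.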
  66 class ShenandoahVerifyOopClosure : public BasicOopIterateClosure {
  67 private:
  68   const char* _phase;
  69   ShenandoahVerifier::VerifyOptions _options;
  70   ShenandoahVerifierStack* _stack;
  71   ShenandoahHeap* _heap;
  72   MarkBitMap* _map;
  73   ShenandoahLivenessData* _ld;
  74   void* _interior_loc;
  75   oop _loc;
  76   ShenandoahGeneration* _generation;
  77 
  78 public:
  79   ShenandoahVerifyOopClosure(ShenandoahVerifierStack* stack, MarkBitMap* map, ShenandoahLivenessData* ld,
  80                              const char* phase, ShenandoahVerifier::VerifyOptions options) :
  81     _phase(phase),
  82     _options(options),
  83     _stack(stack),
  84     _heap(ShenandoahHeap::heap()),
  85     _map(map),
  86     _ld(ld),
  87     _interior_loc(nullptr),
  88     _loc(nullptr),
  89     _generation(nullptr) {
  90     if (options._verify_marked == ShenandoahVerifier::_verify_marked_complete_except_references ||
  91         options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty ||
  92         options._verify_marked == ShenandoahVerifier::_verify_marked_disable) {
  93       set_ref_discoverer_internal(new ShenandoahIgnoreReferenceDiscoverer());
  94     }
  95 
  96     if (_heap->mode()->is_generational()) {
  97       _generation = _heap->gc_generation();
  98       assert(_generation != nullptr, "Expected active generation in this mode");
  99       shenandoah_assert_generations_reconciled();
 100     }
 101   }
 102 
 103 private:
 104   void check(ShenandoahAsserts::SafeLevel level, oop obj, bool test, const char* label) {
 105     if (!test) {
 106       ShenandoahAsserts::print_failure(level, obj, _interior_loc, _loc, _phase, label, __FILE__, __LINE__);
 107     }
 108   }
 109 
 110   template <class T>
 111   void do_oop_work(T* p) {
 112     T o = RawAccess<>::oop_load(p);
 113     if (!CompressedOops::is_null(o)) {
 114       oop obj = CompressedOops::decode_not_null(o);
 115       if (is_instance_ref_klass(obj->klass())) {
 116         obj = ShenandoahForwarding::get_forwardee(obj);
 117       }
 118       // Single threaded verification can use faster non-atomic stack and bitmap
 119       // methods.
 120       //
 121       // For performance reasons, only fully verify non-marked field values.
 122       // We are here when the host object for *p is already marked.
 123 
 124       // TODO: We should consider specializing this closure by generation ==/!= null,
 125       // to avoid in_generation check on fast path here for non-generational mode.
 126       if (in_generation(obj) && _map->par_mark(obj)) {
 127         verify_oop_at(p, obj);
 128         _stack->push(ShenandoahVerifierTask(obj));
 129       }
 130     }
 131   }
 132 
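        // In generational mode, the traversal is restricted to objects in the generation
        // currently being collected; in non-generational mode every object qualifies.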
 133   bool in_generation(oop obj) {
 134     if (_generation == nullptr) {
 135       return true;
 136     }
 137 
 138     ShenandoahHeapRegion* region = _heap->heap_region_containing(obj);
 139     return _generation->contains(region);
 140   }
 141 
 142   void verify_oop(oop obj) {
 143     // Perform consistency checks with gradually decreasing safety level. This guarantees
 144     // that failure report would not try to touch something that was not yet verified to be
 145     // safe to process.
 146 
 147     check(ShenandoahAsserts::_safe_unknown, obj, _heap->is_in(obj),
 148               "oop must be in heap");
 149     check(ShenandoahAsserts::_safe_unknown, obj, is_object_aligned(obj),
 150               "oop must be aligned");
 151 
 152     ShenandoahHeapRegion *obj_reg = _heap->heap_region_containing(obj);
 153     Klass* obj_klass = obj->klass_or_null();
 154 
 155     // Verify that obj is not in dead space:
 156     {
 157       // Do this before touching obj->size()
 158       check(ShenandoahAsserts::_safe_unknown, obj, obj_klass != nullptr,
 159              "Object klass pointer should not be null");
 160       check(ShenandoahAsserts::_safe_unknown, obj, Metaspace::contains(obj_klass),
 161              "Object klass pointer must go to metaspace");
 162 
 163       HeapWord *obj_addr = cast_from_oop<HeapWord*>(obj);
 164       check(ShenandoahAsserts::_safe_unknown, obj, obj_addr < obj_reg->top(),
 165              "Object start should be within the region");
 166 
 167       if (!obj_reg->is_humongous()) {
 168         check(ShenandoahAsserts::_safe_unknown, obj, (obj_addr + obj->size()) <= obj_reg->top(),
 169                "Object end should be within the region");
 170       } else {
 171         size_t humongous_start = obj_reg->index();
 172         size_t humongous_end = humongous_start + (obj->size() >> ShenandoahHeapRegion::region_size_words_shift());
 173         for (size_t idx = humongous_start + 1; idx < humongous_end; idx++) {
 174           check(ShenandoahAsserts::_safe_unknown, obj, _heap->get_region(idx)->is_humongous_continuation(),
  175                  "Humongous object should be covered by humongous continuation regions");
 176         }
 177       }
 178 
 179       // ------------ obj is safe at this point --------------
 180 
 181       check(ShenandoahAsserts::_safe_oop, obj, obj_reg->is_active(),
 182             "Object should be in active region");
 183 
 184       switch (_options._verify_liveness) {
 185         case ShenandoahVerifier::_verify_liveness_disable:
 186           // skip
 187           break;
 188         case ShenandoahVerifier::_verify_liveness_complete:
 189           Atomic::add(&_ld[obj_reg->index()], (uint) obj->size(), memory_order_relaxed);
 190           // fallthrough for fast failure for un-live regions:
 191         case ShenandoahVerifier::_verify_liveness_conservative:
 192           check(ShenandoahAsserts::_safe_oop, obj, obj_reg->has_live() ||
 193                 (obj_reg->is_old() && _heap->gc_generation()->is_young()),
 194                    "Object must belong to region with live data");
 195           shenandoah_assert_generations_reconciled();
 196           break;
 197         default:
 198           assert(false, "Unhandled liveness verification");
 199       }
 200     }
 201 
 202     oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
 203 
 204     ShenandoahHeapRegion* fwd_reg = nullptr;
 205 
 206     if (obj != fwd) {
 207       check(ShenandoahAsserts::_safe_oop, obj, _heap->is_in(fwd),
 208              "Forwardee must be in heap");
 209       check(ShenandoahAsserts::_safe_oop, obj, !CompressedOops::is_null(fwd),
 210              "Forwardee is set");
 211       check(ShenandoahAsserts::_safe_oop, obj, is_object_aligned(fwd),
 212              "Forwardee must be aligned");
 213 
 214       // Do this before touching fwd->size()
 215       Klass* fwd_klass = fwd->klass_or_null();
 216       check(ShenandoahAsserts::_safe_oop, obj, fwd_klass != nullptr,
 217              "Forwardee klass pointer should not be null");
 218       check(ShenandoahAsserts::_safe_oop, obj, Metaspace::contains(fwd_klass),
 219              "Forwardee klass pointer must go to metaspace");
  220       check(ShenandoahAsserts::_safe_oop, obj, obj_klass == fwd_klass,
  221              "Forwardee klass pointer must match the object klass");
 222 
 223       fwd_reg = _heap->heap_region_containing(fwd);
 224 
 225       // Verify that forwardee is not in the dead space:
 226       check(ShenandoahAsserts::_safe_oop, obj, !fwd_reg->is_humongous(),
 227              "Should have no humongous forwardees");
 228 
 229       HeapWord *fwd_addr = cast_from_oop<HeapWord *>(fwd);
 230       check(ShenandoahAsserts::_safe_oop, obj, fwd_addr < fwd_reg->top(),
 231              "Forwardee start should be within the region");
 232       check(ShenandoahAsserts::_safe_oop, obj, (fwd_addr + fwd->size()) <= fwd_reg->top(),
 233              "Forwardee end should be within the region");
 234 
 235       oop fwd2 = ShenandoahForwarding::get_forwardee_raw_unchecked(fwd);
 236       check(ShenandoahAsserts::_safe_oop, obj, (fwd == fwd2),
 237              "Double forwarding");
 238     } else {
 239       fwd_reg = obj_reg;
 240     }
 241 
 242     // ------------ obj and fwd are safe at this point --------------
 243     // We allow for marked or old here for two reasons:
 244     //  1. If this is a young collect, old objects wouldn't be marked. We've
 245     //     recently change the verifier traversal to only follow young objects
  246     //     recently changed the verifier traversal to only follow young objects
  247     //     during a young collect, so this _shouldn't_ be necessary.
 248     //     Everything in the remembered set is old (ipso facto), so allowing for
 249     //     'marked_or_old' covers the case of stale objects in rset.
 250     // TODO: Just use 'is_marked' here.
 251     switch (_options._verify_marked) {
 252       case ShenandoahVerifier::_verify_marked_disable:
 253         // skip
 254         break;
 255       case ShenandoahVerifier::_verify_marked_incomplete:
 256         check(ShenandoahAsserts::_safe_all, obj, _heap->marking_context()->is_marked_or_old(obj),
 257                "Must be marked in incomplete bitmap");
 258         break;
 259       case ShenandoahVerifier::_verify_marked_complete:
 260         check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked_or_old(obj),
 261                "Must be marked in complete bitmap");
 262         break;
 263       case ShenandoahVerifier::_verify_marked_complete_except_references:
 264       case ShenandoahVerifier::_verify_marked_complete_satb_empty:
 265         check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked_or_old(obj),
 266               "Must be marked in complete bitmap, except j.l.r.Reference referents");
 267         break;
 268       default:
 269         assert(false, "Unhandled mark verification");
 270     }
 271 
 272     switch (_options._verify_forwarded) {
 273       case ShenandoahVerifier::_verify_forwarded_disable:
 274         // skip
 275         break;
 276       case ShenandoahVerifier::_verify_forwarded_none: {
 277         check(ShenandoahAsserts::_safe_all, obj, (obj == fwd),
 278                "Should not be forwarded");
 279         break;
 280       }
 281       case ShenandoahVerifier::_verify_forwarded_allow: {
 282         if (obj != fwd) {
 283           check(ShenandoahAsserts::_safe_all, obj, obj_reg != fwd_reg,
 284                  "Forwardee should be in another region");
 285         }
 286         break;
 287       }
 288       default:
 289         assert(false, "Unhandled forwarding verification");
 290     }
 291 
 292     switch (_options._verify_cset) {
 293       case ShenandoahVerifier::_verify_cset_disable:
 294         // skip
 295         break;
 296       case ShenandoahVerifier::_verify_cset_none:
 297         check(ShenandoahAsserts::_safe_all, obj, !_heap->in_collection_set(obj),
 298                "Should not have references to collection set");
 299         break;
 300       case ShenandoahVerifier::_verify_cset_forwarded:
 301         if (_heap->in_collection_set(obj)) {
 302           check(ShenandoahAsserts::_safe_all, obj, (obj != fwd),
 303                  "Object in collection set, should have forwardee");
 304         }
 305         break;
 306       default:
 307         assert(false, "Unhandled cset verification");
 308     }
 309 
 310   }
 311 
 312 public:
 313   /**
 314    * Verify object with known interior reference.
 315    * @param p interior reference where the object is referenced from; can be off-heap
 316    * @param obj verified object
 317    */
 318   template <class T>
 319   void verify_oop_at(T* p, oop obj) {
 320     _interior_loc = p;
 321     verify_oop(obj);
 322     _interior_loc = nullptr;
 323   }
 324 
 325   /**
 326    * Verify object without known interior reference.
 327    * Useful when picking up the object at known offset in heap,
 328    * but without knowing what objects reference it.
 329    * @param obj verified object
 330    */
 331   void verify_oop_standalone(oop obj) {
 332     _interior_loc = nullptr;
 333     verify_oop(obj);
 334     _interior_loc = nullptr;
 335   }
 336 
 337   /**
 338    * Verify oop fields from this object.
 339    * @param obj host object for verified fields
 340    */
 341   void verify_oops_from(oop obj) {
 342     _loc = obj;
 343     obj->oop_iterate(this);
 344     _loc = nullptr;
 345   }
 346 
 347   void do_oop(oop* p) override { do_oop_work(p); }
 348   void do_oop(narrowOop* p) override { do_oop_work(p); }
 349 };
 350 
 351 // This closure computes the amounts of used, committed, and garbage memory and the number of regions contained within
 352 // a subset (e.g. the young generation or old generation) of the total heap.
 353 class ShenandoahCalculateRegionStatsClosure : public ShenandoahHeapRegionClosure {
 354 private:
 355   size_t _used, _committed, _garbage, _regions, _humongous_waste, _trashed_regions;
 356 public:
 357   ShenandoahCalculateRegionStatsClosure() :
 358       _used(0), _committed(0), _garbage(0), _regions(0), _humongous_waste(0), _trashed_regions(0) {};
 359 
 360   void heap_region_do(ShenandoahHeapRegion* r) override {
 361     _used += r->used();
 362     _garbage += r->garbage();
 363     _committed += r->is_committed() ? ShenandoahHeapRegion::region_size_bytes() : 0;
 364     if (r->is_humongous()) {
 365       _humongous_waste += r->free();
 366     }
 367     if (r->is_trash()) {
 368       _trashed_regions++;
 369     }
 370     _regions++;
 371     log_debug(gc)("ShenandoahCalculateRegionStatsClosure: adding " SIZE_FORMAT " for %s Region " SIZE_FORMAT ", yielding: " SIZE_FORMAT,
 372             r->used(), (r->is_humongous() ? "humongous" : "regular"), r->index(), _used);
 373   }
 374 
 375   size_t used() const { return _used; }
 376   size_t committed() const { return _committed; }
 377   size_t garbage() const { return _garbage; }
 378   size_t regions() const { return _regions; }
 379   size_t waste() const { return _humongous_waste; }
 380 
  381   // span is the total memory affiliated with these stats (some of which is in use and the rest is available)
 382   size_t span() const { return _regions * ShenandoahHeapRegion::region_size_bytes(); }
 383   size_t non_trashed_span() const { return (_regions - _trashed_regions) * ShenandoahHeapRegion::region_size_bytes(); }
 384 };
 385 
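      // Accumulates region statistics separately for the old, young, and global generations
      // in a single pass over the heap regions.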
 386 class ShenandoahGenerationStatsClosure : public ShenandoahHeapRegionClosure {
 387  public:
 388   ShenandoahCalculateRegionStatsClosure old;
 389   ShenandoahCalculateRegionStatsClosure young;
 390   ShenandoahCalculateRegionStatsClosure global;
 391 
 392   void heap_region_do(ShenandoahHeapRegion* r) override {
 393     switch (r->affiliation()) {
 394       case FREE:
 395         return;
 396       case YOUNG_GENERATION:
 397         young.heap_region_do(r);
 398         global.heap_region_do(r);
 399         break;
 400       case OLD_GENERATION:
 401         old.heap_region_do(r);
 402         global.heap_region_do(r);
 403         break;
 404       default:
 405         ShouldNotReachHere();
 406     }
 407   }
 408 
 409   static void log_usage(ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
 410     log_debug(gc)("Safepoint verification: %s verified usage: " SIZE_FORMAT "%s, recorded usage: " SIZE_FORMAT "%s",
 411                   generation->name(),
 412                   byte_size_in_proper_unit(generation->used()), proper_unit_for_byte_size(generation->used()),
 413                   byte_size_in_proper_unit(stats.used()),       proper_unit_for_byte_size(stats.used()));
 414   }
 415 
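        // Cross-checks a generation's recorded usage, region count, span, and humongous waste
        // against the values recomputed from the regions. With adjust_for_padding, the pad
        // installed for promote-in-place regions is added to the recorded usage first.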
 416   static void validate_usage(const bool adjust_for_padding,
 417                              const char* label, ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
 418     ShenandoahHeap* heap = ShenandoahHeap::heap();
 419     size_t generation_used = generation->used();
 420     size_t generation_used_regions = generation->used_regions();
 421     if (adjust_for_padding && (generation->is_young() || generation->is_global())) {
 422       size_t pad = heap->old_generation()->get_pad_for_promote_in_place();
 423       generation_used += pad;
 424     }
 425 
 426     guarantee(stats.used() == generation_used,
 427               "%s: generation (%s) used size must be consistent: generation-used: " PROPERFMT ", regions-used: " PROPERFMT,
 428               label, generation->name(), PROPERFMTARGS(generation_used), PROPERFMTARGS(stats.used()));
 429 
 430     guarantee(stats.regions() == generation_used_regions,
 431               "%s: generation (%s) used regions (" SIZE_FORMAT ") must equal regions that are in use (" SIZE_FORMAT ")",
 432               label, generation->name(), generation->used_regions(), stats.regions());
 433 
 434     size_t generation_capacity = generation->max_capacity();
 435     guarantee(stats.non_trashed_span() <= generation_capacity,
 436               "%s: generation (%s) size spanned by regions (" SIZE_FORMAT ") * region size (" PROPERFMT
 437               ") must not exceed current capacity (" PROPERFMT ")",
 438               label, generation->name(), stats.regions(), PROPERFMTARGS(ShenandoahHeapRegion::region_size_bytes()),
 439               PROPERFMTARGS(generation_capacity));
 440 
 441     size_t humongous_waste = generation->get_humongous_waste();
 442     guarantee(stats.waste() == humongous_waste,
 443               "%s: generation (%s) humongous waste must be consistent: generation: " PROPERFMT ", regions: " PROPERFMT,
 444               label, generation->name(), PROPERFMTARGS(humongous_waste), PROPERFMTARGS(stats.waste()));
 445   }
 446 };
 447 
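      // Verifies per-region invariants: the trash/cset expectations selected by VerifyRegions,
      // plus bottom/TAMS/top ordering and allocation accounting for every region visited.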
 448 class ShenandoahVerifyHeapRegionClosure : public ShenandoahHeapRegionClosure {
 449 private:
 450   ShenandoahHeap* _heap;
 451   const char* _phase;
 452   ShenandoahVerifier::VerifyRegions _regions;
 453 public:
 454   ShenandoahVerifyHeapRegionClosure(const char* phase, ShenandoahVerifier::VerifyRegions regions) :
 455     _heap(ShenandoahHeap::heap()),
 456     _phase(phase),
 457     _regions(regions) {};
 458 
 459   void print_failure(ShenandoahHeapRegion* r, const char* label) {
 460     ResourceMark rm;
 461 
 462     ShenandoahMessageBuffer msg("Shenandoah verification failed; %s: %s\n\n", _phase, label);
 463 
 464     stringStream ss;
 465     r->print_on(&ss);
 466     msg.append("%s", ss.as_string());
 467 
 468     report_vm_error(__FILE__, __LINE__, msg.buffer());
 469   }
 470 
 471   void verify(ShenandoahHeapRegion* r, bool test, const char* msg) {
 472     if (!test) {
 473       print_failure(r, msg);
 474     }
 475   }
 476 
 477   void heap_region_do(ShenandoahHeapRegion* r) override {
 478     switch (_regions) {
 479       case ShenandoahVerifier::_verify_regions_disable:
 480         break;
 481       case ShenandoahVerifier::_verify_regions_notrash:
 482         verify(r, !r->is_trash(),
 483                "Should not have trash regions");
 484         break;
 485       case ShenandoahVerifier::_verify_regions_nocset:
 486         verify(r, !r->is_cset(),
 487                "Should not have cset regions");
 488         break;
 489       case ShenandoahVerifier::_verify_regions_notrash_nocset:
 490         verify(r, !r->is_trash(),
 491                "Should not have trash regions");
 492         verify(r, !r->is_cset(),
 493                "Should not have cset regions");
 494         break;
 495       default:
 496         ShouldNotReachHere();
 497     }
 498 
 499     verify(r, r->capacity() == ShenandoahHeapRegion::region_size_bytes(),
 500            "Capacity should match region size");
 501 
 502     verify(r, r->bottom() <= r->top(),
 503            "Region top should not be less than bottom");
 504 
 505     verify(r, r->bottom() <= _heap->marking_context()->top_at_mark_start(r),
 506            "Region TAMS should not be less than bottom");
 507 
 508     verify(r, _heap->marking_context()->top_at_mark_start(r) <= r->top(),
 509            "Complete TAMS should not be larger than top");
 510 
 511     verify(r, r->get_live_data_bytes() <= r->capacity(),
 512            "Live data cannot be larger than capacity");
 513 
 514     verify(r, r->garbage() <= r->capacity(),
 515            "Garbage cannot be larger than capacity");
 516 
 517     verify(r, r->used() <= r->capacity(),
 518            "Used cannot be larger than capacity");
 519 
 520     verify(r, r->get_shared_allocs() <= r->capacity(),
 521            "Shared alloc count should not be larger than capacity");
 522 
 523     verify(r, r->get_tlab_allocs() <= r->capacity(),
 524            "TLAB alloc count should not be larger than capacity");
 525 
 526     verify(r, r->get_gclab_allocs() <= r->capacity(),
 527            "GCLAB alloc count should not be larger than capacity");
 528 
 529     verify(r, r->get_plab_allocs() <= r->capacity(),
 530            "PLAB alloc count should not be larger than capacity");
 531 
 532     verify(r, r->get_shared_allocs() + r->get_tlab_allocs() + r->get_gclab_allocs() + r->get_plab_allocs() == r->used(),
 533            "Accurate accounting: shared + TLAB + GCLAB + PLAB = used");
 534 
 535     verify(r, !r->is_empty() || !r->has_live(),
 536            "Empty regions should not have live data");
 537 
 538     verify(r, r->is_cset() == _heap->collection_set()->is_in(r),
 539            "Transitional: region flags and collection set agree");
 540   }
 541 };
 542 
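      // Walks the object graph reachable from the roots (ShenandoahVerifyLevel >= 2),
      // verifying every object encountered and counting how many were processed.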
 543 class ShenandoahVerifierReachableTask : public WorkerTask {
 544 private:
 545   const char* _label;
 546   ShenandoahVerifier::VerifyOptions _options;
 547   ShenandoahHeap* _heap;
 548   ShenandoahLivenessData* _ld;
 549   MarkBitMap* _bitmap;
 550   volatile size_t _processed;
 551 
 552 public:
 553   ShenandoahVerifierReachableTask(MarkBitMap* bitmap,
 554                                   ShenandoahLivenessData* ld,
 555                                   const char* label,
 556                                   ShenandoahVerifier::VerifyOptions options) :
 557     WorkerTask("Shenandoah Verifier Reachable Objects"),
 558     _label(label),
 559     _options(options),
 560     _heap(ShenandoahHeap::heap()),
 561     _ld(ld),
 562     _bitmap(bitmap),
 563     _processed(0) {};
 564 
 565   size_t processed() const {
 566     return _processed;
 567   }
 568 
 569   void work(uint worker_id) override {
 570     ResourceMark rm;
 571     ShenandoahVerifierStack stack;
 572 
 573     // On level 2, we need to only check the roots once.
 574     // On level 3, we want to check the roots, and seed the local stack.
  575     // It is a lesser evil to accept repeated root scans at level 3, because
  576     // the extra parallelism in the heap walk more than compensates for them.
 577     if (((ShenandoahVerifyLevel == 2) && (worker_id == 0))
 578         || (ShenandoahVerifyLevel >= 3)) {
 579         ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
 580                                       ShenandoahMessageBuffer("%s, Roots", _label),
 581                                       _options);
 582         if (_heap->unload_classes()) {
 583           ShenandoahRootVerifier::strong_roots_do(&cl);
 584         } else {
 585           ShenandoahRootVerifier::roots_do(&cl);
 586         }
 587     }
 588 
 589     size_t processed = 0;
 590 
 591     if (ShenandoahVerifyLevel >= 3) {
 592       ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
 593                                     ShenandoahMessageBuffer("%s, Reachable", _label),
 594                                     _options);
 595       while (!stack.is_empty()) {
 596         processed++;
 597         ShenandoahVerifierTask task = stack.pop();
 598         cl.verify_oops_from(task.obj());
 599       }
 600     }
 601 
 602     Atomic::add(&_processed, processed, memory_order_relaxed);
 603   }
 604 };
 605 
 606 class ShenandoahVerifyNoIncompleteSatbBuffers : public ThreadClosure {
 607 public:
 608   void do_thread(Thread* thread) override {
 609     SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
 610     if (!queue.is_empty()) {
 611       fatal("All SATB buffers should have been flushed during mark");
 612     }
 613   }
 614 };
 615 
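      // Walks all marked objects region by region (ShenandoahVerifyLevel >= 4). Marked objects
      // may be unreachable, so this covers objects the reachability walk above does not visit.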
 616 class ShenandoahVerifierMarkedRegionTask : public WorkerTask {
 617 private:
 618   const char* _label;
 619   ShenandoahVerifier::VerifyOptions _options;
 620   ShenandoahHeap *_heap;
 621   MarkBitMap* _bitmap;
 622   ShenandoahLivenessData* _ld;
 623   volatile size_t _claimed;
 624   volatile size_t _processed;
 625   ShenandoahGeneration* _generation;
 626 
 627 public:
 628   ShenandoahVerifierMarkedRegionTask(MarkBitMap* bitmap,
 629                                      ShenandoahLivenessData* ld,
 630                                      const char* label,
 631                                      ShenandoahVerifier::VerifyOptions options) :
 632           WorkerTask("Shenandoah Verifier Marked Objects"),
 633           _label(label),
 634           _options(options),
 635           _heap(ShenandoahHeap::heap()),
 636           _bitmap(bitmap),
 637           _ld(ld),
 638           _claimed(0),
 639           _processed(0),
 640           _generation(nullptr) {
 641     if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
 642       Threads::change_thread_claim_token();
 643     }
 644 
 645     if (_heap->mode()->is_generational()) {
 646       _generation = _heap->gc_generation();
 647       assert(_generation != nullptr, "Expected active generation in this mode.");
 648       shenandoah_assert_generations_reconciled();
 649     }
 650   };
 651 
 652   size_t processed() {
 653     return Atomic::load(&_processed);
 654   }
 655 
 656   void work(uint worker_id) override {
 657     if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
 658       ShenandoahVerifyNoIncompleteSatbBuffers verify_satb;
 659       Threads::possibly_parallel_threads_do(true, &verify_satb);
 660     }
 661 
 662     ShenandoahVerifierStack stack;
 663     ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
 664                                   ShenandoahMessageBuffer("%s, Marked", _label),
 665                                   _options);
 666 
 667     while (true) {
 668       size_t v = Atomic::fetch_then_add(&_claimed, 1u, memory_order_relaxed);
 669       if (v < _heap->num_regions()) {
 670         ShenandoahHeapRegion* r = _heap->get_region(v);
 671         if (!in_generation(r)) {
 672           continue;
 673         }
 674 
 675         if (!r->is_humongous() && !r->is_trash()) {
 676           work_regular(r, stack, cl);
 677         } else if (r->is_humongous_start()) {
 678           work_humongous(r, stack, cl);
 679         }
 680       } else {
 681         break;
 682       }
 683     }
 684   }
 685 
 686   bool in_generation(ShenandoahHeapRegion* r) {
 687     return _generation == nullptr || _generation->contains(r);
 688   }
 689 
 690   virtual void work_humongous(ShenandoahHeapRegion *r, ShenandoahVerifierStack& stack, ShenandoahVerifyOopClosure& cl) {
 691     size_t processed = 0;
 692     HeapWord* obj = r->bottom();
 693     if (_heap->complete_marking_context()->is_marked(cast_to_oop(obj))) {
 694       verify_and_follow(obj, stack, cl, &processed);
 695     }
 696     Atomic::add(&_processed, processed, memory_order_relaxed);
 697   }
 698 
 699   virtual void work_regular(ShenandoahHeapRegion *r, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl) {
 700     size_t processed = 0;
 701     ShenandoahMarkingContext* ctx = _heap->complete_marking_context();
 702     HeapWord* tams = ctx->top_at_mark_start(r);
 703 
 704     // Bitmaps, before TAMS
 705     if (tams > r->bottom()) {
 706       HeapWord* start = r->bottom();
 707       HeapWord* addr = ctx->get_next_marked_addr(start, tams);
 708 
 709       while (addr < tams) {
 710         verify_and_follow(addr, stack, cl, &processed);
 711         addr += 1;
 712         if (addr < tams) {
 713           addr = ctx->get_next_marked_addr(addr, tams);
 714         }
 715       }
 716     }
 717 
 718     // Size-based, after TAMS
 719     {
 720       HeapWord* limit = r->top();
 721       HeapWord* addr = tams;
 722 
 723       while (addr < limit) {
 724         verify_and_follow(addr, stack, cl, &processed);
 725         addr += cast_to_oop(addr)->size();
 726       }
 727     }
 728 
 729     Atomic::add(&_processed, processed, memory_order_relaxed);
 730   }
 731 
 732   void verify_and_follow(HeapWord *addr, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl, size_t *processed) {
 733     if (!_bitmap->par_mark(addr)) return;
 734 
 735     // Verify the object itself:
 736     oop obj = cast_to_oop(addr);
 737     cl.verify_oop_standalone(obj);
 738 
  739     // Verify everything reachable from that object as well, ideally finding that
  740     // everything is already marked, so the traversal stops right away:
 741     if (!is_instance_ref_klass(obj->klass())) {
 742       cl.verify_oops_from(obj);
 743       (*processed)++;
 744     }
 745     while (!stack.is_empty()) {
 746       ShenandoahVerifierTask task = stack.pop();
 747       cl.verify_oops_from(task.obj());
 748       (*processed)++;
 749     }
 750   }
 751 };
 752 
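      // Checks that a thread's cached gc-state matches the expected global gc-state.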
 753 class VerifyThreadGCState : public ThreadClosure {
 754 private:
 755   const char* const _label;
 756          char const _expected;
 757 
 758 public:
 759   VerifyThreadGCState(const char* label, char expected) : _label(label), _expected(expected) {}
 760   void do_thread(Thread* t) override {
 761     char actual = ShenandoahThreadLocalData::gc_state(t);
 762     if (!verify_gc_state(actual, _expected)) {
 763       fatal("%s: Thread %s: expected gc-state %d, actual %d", _label, t->name(), _expected, actual);
 764     }
 765   }
 766 
 767   static bool verify_gc_state(char actual, char expected) {
 768     // Old generation marking is allowed in all states.
 769     if (ShenandoahHeap::heap()->mode()->is_generational()) {
 770       return ((actual & ~(ShenandoahHeap::OLD_MARKING | ShenandoahHeap::MARKING)) == expected);
 771     } else {
 772       assert((actual & ShenandoahHeap::OLD_MARKING) == 0, "Should not mark old in non-generational mode");
 773       return (actual == expected);
 774     }
 775   }
 776 };
 777 
 778 void ShenandoahVerifier::verify_at_safepoint(const char* label,
 779                                              VerifyRememberedSet remembered,
 780                                              VerifyForwarded forwarded, VerifyMarked marked,
 781                                              VerifyCollectionSet cset,
 782                                              VerifyLiveness liveness, VerifyRegions regions,
 783                                              VerifySize sizeness,
 784                                              VerifyGCState gcstate) {
 785   guarantee(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "only when nothing else happens");
 786   guarantee(ShenandoahVerify, "only when enabled, and bitmap is initialized in ShenandoahHeap::initialize");
 787 
 788   ShenandoahHeap::heap()->propagate_gc_state_to_java_threads();
 789 
 790   // Avoid side-effect of changing workers' active thread count, but bypass concurrent/parallel protocol check
 791   ShenandoahPushWorkerScope verify_worker_scope(_heap->workers(), _heap->max_workers(), false /*bypass check*/);
 792 
 793   log_info(gc,start)("Verify %s, Level " INTX_FORMAT, label, ShenandoahVerifyLevel);
 794 
 795   // GC state checks
 796   {
 797     char expected = -1;
 798     bool enabled;
 799     switch (gcstate) {
 800       case _verify_gcstate_disable:
 801         enabled = false;
 802         break;
 803       case _verify_gcstate_forwarded:
 804         enabled = true;
 805         expected = ShenandoahHeap::HAS_FORWARDED;
 806         break;
 807       case _verify_gcstate_evacuation:
 808         enabled = true;
 809         expected = ShenandoahHeap::EVACUATION;
 810         if (!_heap->is_stw_gc_in_progress()) {
 811           // Only concurrent GC sets this.
 812           expected |= ShenandoahHeap::WEAK_ROOTS;
 813         }
 814         break;
 815       case _verify_gcstate_updating:
 816         enabled = true;
 817         expected = ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::UPDATEREFS;
 818         break;
 819       case _verify_gcstate_stable:
 820         enabled = true;
 821         expected = ShenandoahHeap::STABLE;
 822         break;
 823       case _verify_gcstate_stable_weakroots:
 824         enabled = true;
 825         expected = ShenandoahHeap::STABLE;
 826         if (!_heap->is_stw_gc_in_progress()) {
 827           // Only concurrent GC sets this.
 828           expected |= ShenandoahHeap::WEAK_ROOTS;
 829         }
 830         break;
 831       default:
 832         enabled = false;
 833         assert(false, "Unhandled gc-state verification");
 834     }
 835 
 836     if (enabled) {
 837       char actual = _heap->gc_state();
 838 
 839       bool is_marking = (actual & ShenandoahHeap::MARKING);
 840       bool is_marking_young_or_old = (actual & (ShenandoahHeap::YOUNG_MARKING | ShenandoahHeap::OLD_MARKING));
 841       assert(is_marking == is_marking_young_or_old, "MARKING iff (YOUNG_MARKING or OLD_MARKING), gc_state is: %x", actual);
 842 
 843       // Old generation marking is allowed in all states.
 844       if (!VerifyThreadGCState::verify_gc_state(actual, expected)) {
 845         fatal("%s: Global gc-state: expected %d, actual %d", label, expected, actual);
 846       }
 847 
 848       VerifyThreadGCState vtgcs(label, expected);
 849       Threads::java_threads_do(&vtgcs);
 850     }
 851   }
 852 
 853   // Deactivate barriers temporarily: Verifier wants plain heap accesses
 854   ShenandoahGCStateResetter resetter;
 855 
 856   // Heap size checks
 857   {
 858     ShenandoahHeapLocker lock(_heap->lock());
 859 
 860     ShenandoahCalculateRegionStatsClosure cl;
 861     _heap->heap_region_iterate(&cl);
 862     size_t heap_used;
 863     if (_heap->mode()->is_generational() && (sizeness == _verify_size_adjusted_for_padding)) {
 864       // Prior to evacuation, regular regions that are to be evacuated in place are padded to prevent further allocations
 865       heap_used = _heap->used() + _heap->old_generation()->get_pad_for_promote_in_place();
 866     } else if (sizeness != _verify_size_disable) {
 867       heap_used = _heap->used();
 868     }
 869     if (sizeness != _verify_size_disable) {
 870       guarantee(cl.used() == heap_used,
 871                 "%s: heap used size must be consistent: heap-used = " SIZE_FORMAT "%s, regions-used = " SIZE_FORMAT "%s",
 872                 label,
 873                 byte_size_in_proper_unit(heap_used), proper_unit_for_byte_size(heap_used),
 874                 byte_size_in_proper_unit(cl.used()), proper_unit_for_byte_size(cl.used()));
 875     }
 876     size_t heap_committed = _heap->committed();
 877     guarantee(cl.committed() == heap_committed,
 878               "%s: heap committed size must be consistent: heap-committed = " SIZE_FORMAT "%s, regions-committed = " SIZE_FORMAT "%s",
 879               label,
 880               byte_size_in_proper_unit(heap_committed), proper_unit_for_byte_size(heap_committed),
 881               byte_size_in_proper_unit(cl.committed()), proper_unit_for_byte_size(cl.committed()));
 882   }
 883 
 884   log_debug(gc)("Safepoint verification finished heap usage verification");
 885 
 886   ShenandoahGeneration* generation;
 887   if (_heap->mode()->is_generational()) {
 888     generation = _heap->gc_generation();
 889     guarantee(generation != nullptr, "Need to know which generation to verify.");
 890     shenandoah_assert_generations_reconciled();
 891   } else {
 892     generation = nullptr;
 893   }
 894 
 895   if (generation != nullptr) {
 896     ShenandoahHeapLocker lock(_heap->lock());
 897 
 898     switch (remembered) {
 899       case _verify_remembered_disable:
 900         break;
 901       case _verify_remembered_before_marking:
 902         log_debug(gc)("Safepoint verification of remembered set at mark");
 903         verify_rem_set_before_mark();
 904         break;
 905       case _verify_remembered_before_updating_references:
 906         log_debug(gc)("Safepoint verification of remembered set at update ref");
 907         verify_rem_set_before_update_ref();
 908         break;
 909       case _verify_remembered_after_full_gc:
 910         log_debug(gc)("Safepoint verification of remembered set after full gc");
 911         verify_rem_set_after_full_gc();
 912         break;
 913       default:
 914         fatal("Unhandled remembered set verification mode");
 915     }
 916 
 917     ShenandoahGenerationStatsClosure cl;
 918     _heap->heap_region_iterate(&cl);
 919 
 920     if (LogTarget(Debug, gc)::is_enabled()) {
 921       ShenandoahGenerationStatsClosure::log_usage(_heap->old_generation(),    cl.old);
 922       ShenandoahGenerationStatsClosure::log_usage(_heap->young_generation(),  cl.young);
 923       ShenandoahGenerationStatsClosure::log_usage(_heap->global_generation(), cl.global);
 924     }
 925     if (sizeness == _verify_size_adjusted_for_padding) {
 926       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
 927       ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->young_generation(), cl.young);
 928       ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->global_generation(), cl.global);
 929     } else if (sizeness == _verify_size_exact) {
 930       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
 931       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->young_generation(), cl.young);
 932       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->global_generation(), cl.global);
 933     }
 934     // else: sizeness must equal _verify_size_disable
 935   }
 936 
 937   log_debug(gc)("Safepoint verification finished remembered set verification");
 938 
 939   // Internal heap region checks
 940   if (ShenandoahVerifyLevel >= 1) {
 941     ShenandoahVerifyHeapRegionClosure cl(label, regions);
 942     if (generation != nullptr) {
 943       generation->heap_region_iterate(&cl);
 944     } else {
 945       _heap->heap_region_iterate(&cl);
 946     }
 947   }
 948 
 949   log_debug(gc)("Safepoint verification finished heap region closure verification");
 950 
 951   OrderAccess::fence();
 952 
 953   if (UseTLAB) {
 954     _heap->labs_make_parsable();
 955   }
 956 
  957   // Clear the temporary bitmap used for storing the marking wavefront:
 958   _verification_bit_map->clear();
 959 
 960   // Allocate temporary array for storing liveness data
 961   ShenandoahLivenessData* ld = NEW_C_HEAP_ARRAY(ShenandoahLivenessData, _heap->num_regions(), mtGC);
 962   Copy::fill_to_bytes((void*)ld, _heap->num_regions()*sizeof(ShenandoahLivenessData), 0);
 963 
 964   const VerifyOptions& options = ShenandoahVerifier::VerifyOptions(forwarded, marked, cset, liveness, regions, gcstate);
 965 
 966   // Steps 1-2. Scan root set to get initial reachable set. Finish walking the reachable heap.
  967   // This verifies what the application can see, since it only cares about reachable objects.
 968   size_t count_reachable = 0;
 969   if (ShenandoahVerifyLevel >= 2) {
 970     ShenandoahVerifierReachableTask task(_verification_bit_map, ld, label, options);
 971     _heap->workers()->run_task(&task);
 972     count_reachable = task.processed();
 973   }
 974 
 975   log_debug(gc)("Safepoint verification finished getting initial reachable set");
 976 
  977   // Step 3. Walk marked objects. Marked objects might be unreachable. This verifies what the collector,
  978   // not the application, can see during the region scans. There is no reason to process the objects
  979   // that were already verified, e.g. those marked in the verification bitmap. There is interaction with TAMS:
  980   // before TAMS, we verify the bitmaps, if available; after TAMS, we walk until the top(). It mimics
  981   // what marked_object_iterate is doing, without calling into that optimized (and possibly incorrect)
  982   // version.
 983 
 984   size_t count_marked = 0;
 985   if (ShenandoahVerifyLevel >= 4 &&
 986         (marked == _verify_marked_complete ||
 987          marked == _verify_marked_complete_except_references ||
 988          marked == _verify_marked_complete_satb_empty)) {
 989     guarantee(_heap->marking_context()->is_complete(), "Marking context should be complete");
 990     ShenandoahVerifierMarkedRegionTask task(_verification_bit_map, ld, label, options);
 991     _heap->workers()->run_task(&task);
 992     count_marked = task.processed();
 993   } else {
 994     guarantee(ShenandoahVerifyLevel < 4 || marked == _verify_marked_incomplete || marked == _verify_marked_disable, "Should be");
 995   }
 996 
 997   log_debug(gc)("Safepoint verification finished walking marked objects");
 998 
 999   // Step 4. Verify accumulated liveness data, if needed. Only reliable if verification level includes
1000   // marked objects.
1001 
1002   if (ShenandoahVerifyLevel >= 4 && marked == _verify_marked_complete && liveness == _verify_liveness_complete) {
1003     for (size_t i = 0; i < _heap->num_regions(); i++) {
1004       ShenandoahHeapRegion* r = _heap->get_region(i);
1005       if (generation != nullptr && !generation->contains(r)) {
1006         continue;
1007       }
1008 
1009       juint verf_live = 0;
1010       if (r->is_humongous()) {
1011         // For humongous objects, test if start region is marked live, and if so,
1012         // all humongous regions in that chain have live data equal to their "used".
1013         juint start_live = Atomic::load(&ld[r->humongous_start_region()->index()]);
1014         if (start_live > 0) {
1015           verf_live = (juint)(r->used() / HeapWordSize);
1016         }
1017       } else {
1018         verf_live = Atomic::load(&ld[r->index()]);
1019       }
1020 
1021       size_t reg_live = r->get_live_data_words();
1022       if (reg_live != verf_live) {
1023         stringStream ss;
1024         r->print_on(&ss);
1025         fatal("%s: Live data should match: region-live = " SIZE_FORMAT ", verifier-live = " UINT32_FORMAT "\n%s",
1026               label, reg_live, verf_live, ss.freeze());
1027       }
1028     }
1029   }
1030 
1031   log_debug(gc)("Safepoint verification finished accumulation of liveness data");
1032 
1033 
1034   log_info(gc)("Verify %s, Level " INTX_FORMAT " (" SIZE_FORMAT " reachable, " SIZE_FORMAT " marked)",
1035                label, ShenandoahVerifyLevel, count_reachable, count_marked);
1036 
1037   FREE_C_HEAP_ARRAY(ShenandoahLivenessData, ld);
1038 }
1039 
1040 void ShenandoahVerifier::verify_generic(VerifyOption vo) {
1041   verify_at_safepoint(
1042           "Generic Verification",
1043           _verify_remembered_disable,  // do not verify remembered set
1044           _verify_forwarded_allow,     // conservatively allow forwarded
 1045           _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
1046           _verify_cset_disable,        // cset may be inconsistent
1047           _verify_liveness_disable,    // no reliable liveness data
1048           _verify_regions_disable,     // no reliable region data
1049           _verify_size_exact,          // expect generation and heap sizes to match exactly
1050           _verify_gcstate_disable      // no data about gcstate
1051   );
1052 }
1053 
1054 void ShenandoahVerifier::verify_before_concmark() {
1055     verify_at_safepoint(
1056           "Before Mark",
1057           _verify_remembered_before_marking,
1058                                        // verify read-only remembered set from bottom() to top()
1059           _verify_forwarded_none,      // UR should have fixed up
 1060           _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
1061           _verify_cset_none,           // UR should have fixed this
1062           _verify_liveness_disable,    // no reliable liveness data
1063           _verify_regions_notrash,     // no trash regions
1064           _verify_size_exact,          // expect generation and heap sizes to match exactly
1065           _verify_gcstate_stable       // there are no forwarded objects
1066   );
1067 }
1068 
1069 void ShenandoahVerifier::verify_after_concmark() {
1070   verify_at_safepoint(
1071           "After Mark",
1072           _verify_remembered_disable,  // do not verify remembered set
1073           _verify_forwarded_none,      // no forwarded references
1074           _verify_marked_complete_satb_empty,
1075                                        // bitmaps as precise as we can get, except dangling j.l.r.Refs
1076           _verify_cset_none,           // no references to cset anymore
1077           _verify_liveness_complete,   // liveness data must be complete here
1078           _verify_regions_disable,     // trash regions not yet recycled
1079           _verify_size_exact,          // expect generation and heap sizes to match exactly
1080           _verify_gcstate_stable_weakroots  // heap is still stable, weakroots are in progress
1081   );
1082 }
1083 
1084 void ShenandoahVerifier::verify_before_evacuation() {
1085   verify_at_safepoint(
1086           "Before Evacuation",
1087           _verify_remembered_disable,                // do not verify remembered set
1088           _verify_forwarded_none,                    // no forwarded references
1089           _verify_marked_complete_except_references, // walk over marked objects too
1090           _verify_cset_disable,                      // non-forwarded references to cset expected
1091           _verify_liveness_complete,                 // liveness data must be complete here
1092           _verify_regions_disable,                   // trash regions not yet recycled
1093           _verify_size_adjusted_for_padding,         // expect generation and heap sizes to match after adjustments
1094                                                      //  for promote in place padding
1095           _verify_gcstate_stable_weakroots           // heap is still stable, weakroots are in progress
1096   );
1097 }
1098 
1099 void ShenandoahVerifier::verify_during_evacuation() {
1100   verify_at_safepoint(
1101           "During Evacuation",
1102           _verify_remembered_disable, // do not verify remembered set
1103           _verify_forwarded_allow,    // some forwarded references are allowed
1104           _verify_marked_disable,     // walk only roots
1105           _verify_cset_disable,       // some cset references are not forwarded yet
1106           _verify_liveness_disable,   // liveness data might be already stale after pre-evacs
1107           _verify_regions_disable,    // trash regions not yet recycled
1108           _verify_size_disable,       // we don't know how much of promote-in-place work has been completed
1109           _verify_gcstate_evacuation  // evacuation is in progress
1110   );
1111 }
1112 
1113 void ShenandoahVerifier::verify_after_evacuation() {
1114   verify_at_safepoint(
1115           "After Evacuation",
1116           _verify_remembered_disable,  // do not verify remembered set
1117           _verify_forwarded_allow,     // objects are still forwarded
1118           _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
1119           _verify_cset_forwarded,      // all cset refs are fully forwarded
1120           _verify_liveness_disable,    // no reliable liveness data anymore
1121           _verify_regions_notrash,     // trash regions have been recycled already
1122           _verify_size_exact,          // expect generation and heap sizes to match exactly
1123           _verify_gcstate_forwarded    // evacuation produced some forwarded objects
1124   );
1125 }
1126 
1127 void ShenandoahVerifier::verify_before_updaterefs() {
1128   verify_at_safepoint(
1129           "Before Updating References",
1130           _verify_remembered_before_updating_references,  // verify read-write remembered set
1131           _verify_forwarded_allow,     // forwarded references allowed
1132           _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
1133           _verify_cset_forwarded,      // all cset refs are fully forwarded
1134           _verify_liveness_disable,    // no reliable liveness data anymore
1135           _verify_regions_notrash,     // trash regions have been recycled already
1136           _verify_size_exact,          // expect generation and heap sizes to match exactly
1137           _verify_gcstate_updating     // evacuation should have produced some forwarded objects
1138   );
1139 }
1140 
 1141 // We have not yet cleaned up (reclaimed) the collection set
1142 void ShenandoahVerifier::verify_after_updaterefs() {
1143   verify_at_safepoint(
1144           "After Updating References",
1145           _verify_remembered_disable,  // do not verify remembered set
1146           _verify_forwarded_none,      // no forwarded references
1147           _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
1148           _verify_cset_none,           // no cset references, all updated
1149           _verify_liveness_disable,    // no reliable liveness data anymore
1150           _verify_regions_nocset,      // no cset regions, trash regions have appeared
1151           _verify_size_exact,          // expect generation and heap sizes to match exactly
1152           _verify_gcstate_stable       // update refs had cleaned up forwarded objects
1153   );
1154 }
1155 
1156 void ShenandoahVerifier::verify_after_degenerated() {
1157   verify_at_safepoint(
1158           "After Degenerated GC",
1159           _verify_remembered_disable,  // do not verify remembered set
1160           _verify_forwarded_none,      // all objects are non-forwarded
1161           _verify_marked_complete,     // all objects are marked in complete bitmap
1162           _verify_cset_none,           // no cset references
1163           _verify_liveness_disable,    // no reliable liveness data anymore
1164           _verify_regions_notrash_nocset, // no trash, no cset
1165           _verify_size_exact,          // expect generation and heap sizes to match exactly
1166           _verify_gcstate_stable       // degenerated refs had cleaned up forwarded objects
1167   );
1168 }
1169 
1170 void ShenandoahVerifier::verify_before_fullgc() {
1171   verify_at_safepoint(
1172           "Before Full GC",
1173           _verify_remembered_disable,  // do not verify remembered set
1174           _verify_forwarded_allow,     // can have forwarded objects
 1175           _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
1176           _verify_cset_disable,        // cset might be foobared
1177           _verify_liveness_disable,    // no reliable liveness data anymore
1178           _verify_regions_disable,     // no reliable region data here
1179           _verify_size_disable,        // if we degenerate during evacuation, usage not valid: padding and deferred accounting
1180           _verify_gcstate_disable      // no reliable gcstate data
1181   );
1182 }
1183 
1184 void ShenandoahVerifier::verify_after_fullgc() {
1185   verify_at_safepoint(
1186           "After Full GC",
1187           _verify_remembered_after_full_gc,  // verify read-write remembered set
1188           _verify_forwarded_none,      // all objects are non-forwarded
1189           _verify_marked_complete,     // all objects are marked in complete bitmap
1190           _verify_cset_none,           // no cset references
1191           _verify_liveness_disable,    // no reliable liveness data anymore
1192           _verify_regions_notrash_nocset, // no trash, no cset
1193           _verify_size_exact,           // expect generation and heap sizes to match exactly
1194           _verify_gcstate_stable        // full gc cleaned up everything
1195   );
1196 }
1197 
 1198 // TODO: Why does this closure not visit metadata?
 1199 class ShenandoahVerifyNoForwarded : public BasicOopIterateClosure {
1200 private:
1201   template <class T>
1202   void do_oop_work(T* p) {
1203     T o = RawAccess<>::oop_load(p);
1204     if (!CompressedOops::is_null(o)) {
1205       oop obj = CompressedOops::decode_not_null(o);
1206       oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
1207       if (obj != fwd) {
1208         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1209                                          "Verify Roots", "Should not be forwarded", __FILE__, __LINE__);
1210       }
1211     }
1212   }
1213 
1214 public:
 1215   void do_oop(narrowOop* p) override { do_oop_work(p); }
 1216   void do_oop(oop* p)       override { do_oop_work(p); }
1217 };
1218 
 1219 // TODO: Why does this closure not visit metadata?
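      // Verifies that root references point into to-space: marked (or old), not in the
      // collection set, and not forwarded.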
1220 class ShenandoahVerifyInToSpaceClosure : public BasicOopIterateClosure {
1221 private:
1222   template <class T>
1223   void do_oop_work(T* p) {
1224     T o = RawAccess<>::oop_load(p);
1225     if (!CompressedOops::is_null(o)) {
1226       oop obj = CompressedOops::decode_not_null(o);
1227       ShenandoahHeap* heap = ShenandoahHeap::heap();
1228 
1229       if (!heap->marking_context()->is_marked_or_old(obj)) {
1230         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1231                 "Verify Roots In To-Space", "Should be marked", __FILE__, __LINE__);
1232       }
1233 
1234       if (heap->in_collection_set(obj)) {
1235         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1236                 "Verify Roots In To-Space", "Should not be in collection set", __FILE__, __LINE__);
1237       }
1238 
1239       oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
1240       if (obj != fwd) {
1241         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1242                 "Verify Roots In To-Space", "Should not be forwarded", __FILE__, __LINE__);
1243       }
1244     }
1245   }
1246 
1247 public:
1248   void do_oop(narrowOop* p) override { do_oop_work(p); }
1249   void do_oop(oop* p)       override { do_oop_work(p); }
1250 };
1251 
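     // Walk all roots and report any root that refers to an object that is neither marked nor old,
     // is in the collection set, or is still forwarded.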
1252 void ShenandoahVerifier::verify_roots_in_to_space() {
1253   ShenandoahVerifyInToSpaceClosure cl;
1254   ShenandoahRootVerifier::roots_do(&cl);
1255 }
1256 
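     // Walk all roots and report any root that still refers to a forwarded object.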
1257 void ShenandoahVerifier::verify_roots_no_forwarded() {
1258   ShenandoahVerifyNoForwarded cl;
1259   ShenandoahRootVerifier::roots_do(&cl);
1260 }
1261 
1262 class ShenandoahVerifyRemSetClosure : public BasicOopIterateClosure {
1263 protected:
1264   bool                        const _init_mark;
1265   ShenandoahGenerationalHeap* const _heap;
1266   RememberedScanner*          const _scanner;
1267 
1268 public:
1269   // The argument distinguishes between verification at init-mark and at init-update-refs.
1270   explicit ShenandoahVerifyRemSetClosure(bool init_mark) :
1271             _init_mark(init_mark),
1272             _heap(ShenandoahGenerationalHeap::heap()),
1273             _scanner(_heap->old_generation()->card_scan()) {}
1274 
1275   template<class T>
1276   inline void work(T* p) {
1277     T o = RawAccess<>::oop_load(p);
1278     if (!CompressedOops::is_null(o)) {
1279       oop obj = CompressedOops::decode_not_null(o);
1280       if (_heap->is_in_young(obj)) {
1281         size_t card_index = _scanner->card_index_for_addr((HeapWord*) p);
1282         if (_init_mark && !_scanner->is_card_dirty(card_index)) {
1283           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1284                                            "Verify init-mark remembered set violation", "clean card should be dirty", __FILE__, __LINE__);
1285         } else if (!_init_mark && !_scanner->is_write_card_dirty(card_index)) {
1286           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1287                                            "Verify init-update-refs remembered set violation", "clean card should be dirty", __FILE__, __LINE__);
1288         }
1289       }
1290     }
1291   }
1292 
1293   void do_oop(narrowOop* p) override { work(p); }
1294   void do_oop(oop* p)       override { work(p); }
1295 };
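
     // Illustrative sketch (not part of the verifier): for a field address p that holds a pointer into the
     // young generation, the check in work() reduces roughly to:
     //
     //   size_t idx = _scanner->card_index_for_addr((HeapWord*) p);
     //   bool ok    = _init_mark ? _scanner->is_card_dirty(idx)        // init-mark verifies the read table
     //                           : _scanner->is_write_card_dirty(idx); // init-update-refs verifies the write table
     //
     // A clean card covering such an interesting pointer is reported as a remembered set violation.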
1296 
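     // Helper for the remembered set verifiers: walk the live objects of region r within [from, top),
     // verify that every card spanning an interesting pointer is dirty in the write card table, and check
     // that objects starting below registration_watermark are properly registered with the card scanner.
     // A null ctx means no usable marking context is available and every object is treated as live.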
1297 void ShenandoahVerifier::help_verify_region_rem_set(ShenandoahHeapRegion* r, ShenandoahMarkingContext* ctx, HeapWord* from,
1298                                                     HeapWord* top, HeapWord* registration_watermark, const char* message) {
1299   RememberedScanner* scanner = ShenandoahGenerationalHeap::heap()->old_generation()->card_scan();
1300   ShenandoahVerifyRemSetClosure check_interesting_pointers(false);
1301 
1302   HeapWord* obj_addr = from;
1303   if (r->is_humongous_start()) {
1304     oop obj = cast_to_oop(obj_addr);
1305     if ((ctx == nullptr) || ctx->is_marked(obj)) {
1306       size_t card_index = scanner->card_index_for_addr(obj_addr);
1307       // For humongous objects, the typical object is an array, so the following checks may be overkill
1308       // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1309       // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1310       if (!scanner->is_write_card_dirty(card_index) || obj->is_objArray()) {
1311         obj->oop_iterate(&check_interesting_pointers);
1312       }
1313       // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1314     }
1315     // else, this humongous object is not live so no need to verify its internal pointers
1316 
1317     if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1318       ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1319                                        "object not properly registered", __FILE__, __LINE__);
1320     }
1321   } else if (!r->is_humongous()) {
1322     while (obj_addr < top) {
1323       oop obj = cast_to_oop(obj_addr);
1324       // ctx->is_marked() returns true if the mark bit is set or if obj is above TAMS.
1325       if ((ctx == nullptr) || ctx->is_marked(obj)) {
1326         size_t card_index = scanner->card_index_for_addr(obj_addr);
1327         // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1328         // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1329         if (!scanner->is_write_card_dirty(card_index) || obj->is_objArray()) {
1330           obj->oop_iterate(&check_interesting_pointers);
1331         }
1332         // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1333 
1334         if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1335           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1336                                            "object not properly registered", __FILE__, __LINE__);
1337         }
1338         obj_addr += obj->size();
1339       } else {
1340         // This object is not live so we don't verify dirty cards contained therein
1341         HeapWord* tams = ctx->top_at_mark_start(r);
1342         obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
1343       }
1344     }
1345   }
1346 }
1347 
1348 // Ensure that the remembered set has a dirty card everywhere there is an interesting pointer.
1349 // This examines the read_card_table between bottom() and top() because all PLABs are retired
1350 // before the init-mark safepoint.  In fact, they are retired before update-references and are not
1351 // restored until the start of evacuation.
1352 void ShenandoahVerifier::verify_rem_set_before_mark() {
1353   shenandoah_assert_safepoint();
1354   assert(_heap->mode()->is_generational(), "Only verify remembered set for generational operational modes");
1355 
1356   RememberedScanner* scanner = ShenandoahGenerationalHeap::heap()->old_generation()->card_scan();
1357   ShenandoahVerifyRemSetClosure check_interesting_pointers(true);
1358   ShenandoahMarkingContext* ctx;
1359 
1360   ShenandoahOldGeneration* old_generation = _heap->old_generation();
1361   log_debug(gc)("Verifying remembered set at %s mark", old_generation->is_doing_mixed_evacuations() ? "mixed" : "young");
1362 
1363   shenandoah_assert_generations_reconciled();
1364   if (old_generation->is_mark_complete() || _heap->gc_generation()->is_global()) {
1365     ctx = _heap->complete_marking_context();
1366   } else {
1367     ctx = nullptr;
1368   }
1369 
1370   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1371     ShenandoahHeapRegion* r = _heap->get_region(i);
1372     HeapWord* tams = (ctx != nullptr) ? ctx->top_at_mark_start(r) : nullptr;
1373 
1374     // TODO: Is this replaceable with a call to help_verify_region_rem_set?
1375 
1376     if (r->is_old() && r->is_active()) {
1377       HeapWord* obj_addr = r->bottom();
1378       if (r->is_humongous_start()) {
1379         oop obj = cast_to_oop(obj_addr);
1380         if ((ctx == nullptr) || ctx->is_marked(obj)) {
1381           // For humongous objects, the typical object is an array, so the following checks may be overkill
1382           // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1383           // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1384           if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1385             obj->oop_iterate(&check_interesting_pointers);
1386           }
1387           // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1388         }
1389         // else, this humongous object is not marked so no need to verify its internal pointers
1390         if (!scanner->verify_registration(obj_addr, ctx)) {
1391           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, nullptr, nullptr,
1392                                            "Verify init-mark remembered set violation", "object not properly registered", __FILE__, __LINE__);
1393         }
1394       } else if (!r->is_humongous()) {
1395         HeapWord* top = r->top();
1396         while (obj_addr < top) {
1397           oop obj = cast_to_oop(obj_addr);
1398           // ctx->is_marked() returns true if the mark bit is set (TAMS is not relevant during init mark)
1399           if ((ctx == nullptr) || ctx->is_marked(obj)) {
1400             // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1401             // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1402             if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1403               obj->oop_iterate(&check_interesting_pointers);
1404             }
1405             // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1406             if (!scanner->verify_registration(obj_addr, ctx)) {
1407               ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, nullptr, nullptr,
1408                                                "Verify init-mark remembered set violation", "object not properly registered", __FILE__, __LINE__);
1409             }
1410             obj_addr += obj->size();
1411           } else {
1412             // This object is not live so we don't verify dirty cards contained therein
1413             assert(tams != nullptr, "If object is not live, ctx and tams should be non-null");
1414             obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
1415           }
1416         }
1417       } // else, we ignore humongous continuation region
1418     } // else, this is not an OLD region so we ignore it
1419   } // all regions have been processed
1420 }
1421 
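     // Full GC verification passes a null marking context (every object is treated as live) and uses top()
     // as the registration watermark for each old region outside the collection set.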
1422 void ShenandoahVerifier::verify_rem_set_after_full_gc() {
1423   shenandoah_assert_safepoint();
1424   assert(_heap->mode()->is_generational(), "Only verify remembered set for generational operational modes");
1425 
1426   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1427     ShenandoahHeapRegion* r = _heap->get_region(i);
1428     if (r->is_old() && !r->is_cset()) {
1429       help_verify_region_rem_set(r, nullptr, r->bottom(), r->top(), r->top(), "Remembered set violation at end of Full GC");
1430     }
1431   }
1432 }
1433 
1434 // Ensure that the remembered set has a dirty card everywhere there is an interesting pointer.  Even though
1435 // the update-references scan of the remembered set only examines cards up to update_watermark, the remembered
1436 // set should be valid through top().  This examines the write_card_table between bottom() and top() because
1437 // all PLABs are retired immediately before the start of update refs.
1438 void ShenandoahVerifier::verify_rem_set_before_update_ref() {
1439   shenandoah_assert_safepoint();
1440   assert(_heap->mode()->is_generational(), "Only verify remembered set for generational operational modes");
1441 
1442   ShenandoahMarkingContext* ctx;
1443 
1444   shenandoah_assert_generations_reconciled();
1445   if (_heap->old_generation()->is_mark_complete() || _heap->gc_generation()->is_global()) {
1446     ctx = _heap->complete_marking_context();
1447   } else {
1448     ctx = nullptr;
1449   }
1450 
1451   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1452     ShenandoahHeapRegion* r = _heap->get_region(i);
1453     if (r->is_old() && !r->is_cset()) {
1454       help_verify_region_rem_set(r, ctx, r->bottom(), r->top(), r->get_update_watermark(),
1455                                  "Remembered set violation at init-update-references");
1456     }
1457   }
1458 }
1459 
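     // Take a fresh census of all regions and check that the usage recorded for the old, young, and
     // global generations matches what the census observed, before the free set is rebuilt.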
1460 void ShenandoahVerifier::verify_before_rebuilding_free_set() {
1461   ShenandoahGenerationStatsClosure cl;
1462   _heap->heap_region_iterate(&cl);
1463 
1464   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->old_generation(), cl.old);
1465   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->young_generation(), cl.young);
1466   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->global_generation(), cl.global);
1467 }