1 /*
   2  * Copyright (c) 2017, 2021, Red Hat, Inc. All rights reserved.
   3  * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
   4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   5  *
   6  * This code is free software; you can redistribute it and/or modify it
   7  * under the terms of the GNU General Public License version 2 only, as
   8  * published by the Free Software Foundation.
   9  *
  10  * This code is distributed in the hope that it will be useful, but WITHOUT
  11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  13  * version 2 for more details (a copy is included in the LICENSE file that
  14  * accompanied this code).
  15  *
  16  * You should have received a copy of the GNU General Public License version
  17  * 2 along with this work; if not, write to the Free Software Foundation,
  18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  19  *
  20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  21  * or visit www.oracle.com if you need additional information or have any
  22  * questions.
  23  *
  24  */
  25 
  26 #include "precompiled.hpp"
  27 #include "gc/shared/tlab_globals.hpp"
  28 #include "gc/shenandoah/shenandoahAsserts.hpp"
  29 #include "gc/shenandoah/shenandoahForwarding.inline.hpp"
  30 #include "gc/shenandoah/shenandoahPhaseTimings.hpp"
  31 #include "gc/shenandoah/shenandoahGeneration.hpp"
  32 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
  33 #include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
  34 #include "gc/shenandoah/shenandoahOldGeneration.hpp"
  35 #include "gc/shenandoah/shenandoahRootProcessor.hpp"
  36 #include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
  37 #include "gc/shenandoah/shenandoahTaskqueue.inline.hpp"
  38 #include "gc/shenandoah/shenandoahUtils.hpp"
  39 #include "gc/shenandoah/shenandoahVerifier.hpp"
  40 #include "gc/shenandoah/shenandoahYoungGeneration.hpp"
  41 #include "memory/allocation.hpp"
  42 #include "memory/iterator.inline.hpp"
  43 #include "memory/resourceArea.hpp"
  44 #include "oops/compressedOops.inline.hpp"
  45 #include "runtime/atomic.hpp"
  46 #include "runtime/orderAccess.hpp"
  47 #include "runtime/threads.hpp"
  48 #include "utilities/align.hpp"
  49 
  50 // Avoid name collision on verify_oop (defined in macroAssembler_arm.hpp)
  51 #ifdef verify_oop
  52 #undef verify_oop
  53 #endif
  54 
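// Helper: true if the klass is a java.lang.ref.Reference subtype; the verifier
// special-cases such objects in several places below.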
  55 static bool is_instance_ref_klass(Klass* k) {
  56   return k->is_instance_klass() && InstanceKlass::cast(k)->reference_type() != REF_NONE;
  57 }
  58 
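// Pretends to discover every j.l.r.Reference, so that oop iteration does not hand the
// referent field to the verifying closure; used when unmarked referents are tolerated.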
  59 class ShenandoahIgnoreReferenceDiscoverer : public ReferenceDiscoverer {
  60 public:
  61   virtual bool discover_reference(oop obj, ReferenceType type) {
  62     return true;
  63   }
  64 };
  65 
  66 class ShenandoahVerifyOopClosure : public BasicOopIterateClosure {
  67 private:
  68   const char* _phase;
  69   ShenandoahVerifier::VerifyOptions _options;
  70   ShenandoahVerifierStack* _stack;
  71   ShenandoahHeap* _heap;
  72   MarkBitMap* _map;
  73   ShenandoahLivenessData* _ld;
  74   void* _interior_loc;
  75   oop _loc;
  76   ShenandoahGeneration* _generation;
  77 
  78 public:
  79   ShenandoahVerifyOopClosure(ShenandoahVerifierStack* stack, MarkBitMap* map, ShenandoahLivenessData* ld,
  80                              const char* phase, ShenandoahVerifier::VerifyOptions options) :
  81     _phase(phase),
  82     _options(options),
  83     _stack(stack),
  84     _heap(ShenandoahHeap::heap()),
  85     _map(map),
  86     _ld(ld),
  87     _interior_loc(nullptr),
  88     _loc(nullptr),
  89     _generation(nullptr) {
  90     if (options._verify_marked == ShenandoahVerifier::_verify_marked_complete_except_references ||
  91         options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty ||
  92         options._verify_marked == ShenandoahVerifier::_verify_marked_disable) {
  93       set_ref_discoverer_internal(new ShenandoahIgnoreReferenceDiscoverer());
  94     }
  95 
  96     if (_heap->mode()->is_generational()) {
  97       _generation = _heap->active_generation();
  98       assert(_generation != nullptr, "Expected active generation in this mode");
  99     }
 100   }
 101 
 102 private:
 103   void check(ShenandoahAsserts::SafeLevel level, oop obj, bool test, const char* label) {
 104     if (!test) {
 105       ShenandoahAsserts::print_failure(level, obj, _interior_loc, _loc, _phase, label, __FILE__, __LINE__);
 106     }
 107   }
 108 
 109   template <class T>
 110   void do_oop_work(T* p) {
 111     T o = RawAccess<>::oop_load(p);
 112     if (!CompressedOops::is_null(o)) {
 113       oop obj = CompressedOops::decode_not_null(o);
 114       if (is_instance_ref_klass(obj->klass())) {
 115         obj = ShenandoahForwarding::get_forwardee(obj);
 116       }
 117       // Single threaded verification can use faster non-atomic stack and bitmap
 118       // methods.
 119       //
 120       // For performance reasons, only fully verify non-marked field values.
 121       // We are here when the host object for *p is already marked.
 122 
 123       // TODO: We should consider specializing this closure by generation ==/!= null,
 124       // to avoid in_generation check on fast path here for non-generational mode.
 125       if (in_generation(obj) && _map->par_mark(obj)) {
 126         verify_oop_at(p, obj);
 127         _stack->push(ShenandoahVerifierTask(obj));
 128       }
 129     }
 130   }
 131 
 132   bool in_generation(oop obj) {
 133     if (_generation == nullptr) {
 134       return true;
 135     }
 136 
 137     ShenandoahHeapRegion* region = _heap->heap_region_containing(obj);
 138     return _generation->contains(region);
 139   }
 140 
 141   void verify_oop(oop obj) {
 142     // Perform consistency checks with gradually decreasing safety level. This guarantees
 143     // that failure report would not try to touch something that was not yet verified to be
 144     // safe to process.
 145 
 146     check(ShenandoahAsserts::_safe_unknown, obj, _heap->is_in(obj),
 147               "oop must be in heap");
 148     check(ShenandoahAsserts::_safe_unknown, obj, is_object_aligned(obj),
 149               "oop must be aligned");
 150 
 151     ShenandoahHeapRegion *obj_reg = _heap->heap_region_containing(obj);
 152     Klass* obj_klass = obj->klass_or_null();
 153 
 154     // Verify that obj is not in dead space:
 155     {
 156       // Do this before touching obj->size()
 157       check(ShenandoahAsserts::_safe_unknown, obj, obj_klass != nullptr,
 158              "Object klass pointer should not be null");
 159       check(ShenandoahAsserts::_safe_unknown, obj, Metaspace::contains(obj_klass),
 160              "Object klass pointer must go to metaspace");
 161 
 162       HeapWord *obj_addr = cast_from_oop<HeapWord*>(obj);
 163       check(ShenandoahAsserts::_safe_unknown, obj, obj_addr < obj_reg->top(),
 164              "Object start should be within the region");
 165 
 166       if (!obj_reg->is_humongous()) {
 167         check(ShenandoahAsserts::_safe_unknown, obj, (obj_addr + obj->size()) <= obj_reg->top(),
 168                "Object end should be within the region");
 169       } else {
 170         size_t humongous_start = obj_reg->index();
 171         size_t humongous_end = humongous_start + (obj->size() >> ShenandoahHeapRegion::region_size_words_shift());
 172         for (size_t idx = humongous_start + 1; idx < humongous_end; idx++) {
 173           check(ShenandoahAsserts::_safe_unknown, obj, _heap->get_region(idx)->is_humongous_continuation(),
 174                  "Humongous object is in continuation that fits it");
 175         }
 176       }
 177 
 178       // ------------ obj is safe at this point --------------
 179 
 180       check(ShenandoahAsserts::_safe_oop, obj, obj_reg->is_active(),
 181             "Object should be in active region");
 182 
 183       switch (_options._verify_liveness) {
 184         case ShenandoahVerifier::_verify_liveness_disable:
 185           // skip
 186           break;
 187         case ShenandoahVerifier::_verify_liveness_complete:
 188           Atomic::add(&_ld[obj_reg->index()], (uint) obj->size(), memory_order_relaxed);
 189           // fallthrough for fast failure for un-live regions:
 190         case ShenandoahVerifier::_verify_liveness_conservative:
 191           check(ShenandoahAsserts::_safe_oop, obj, obj_reg->has_live() ||
 192                 (obj_reg->is_old() && _heap->active_generation()->is_young()),
 193                    "Object must belong to region with live data");
 194           break;
 195         default:
 196           assert(false, "Unhandled liveness verification");
 197       }
 198     }
 199 
 200     oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
 201 
 202     ShenandoahHeapRegion* fwd_reg = nullptr;
 203 
 204     if (obj != fwd) {
 205       check(ShenandoahAsserts::_safe_oop, obj, _heap->is_in(fwd),
 206              "Forwardee must be in heap");
 207       check(ShenandoahAsserts::_safe_oop, obj, !CompressedOops::is_null(fwd),
 208              "Forwardee is set");
 209       check(ShenandoahAsserts::_safe_oop, obj, is_object_aligned(fwd),
 210              "Forwardee must be aligned");
 211 
 212       // Do this before touching fwd->size()
 213       Klass* fwd_klass = fwd->klass_or_null();
 214       check(ShenandoahAsserts::_safe_oop, obj, fwd_klass != nullptr,
 215              "Forwardee klass pointer should not be null");
 216       check(ShenandoahAsserts::_safe_oop, obj, Metaspace::contains(fwd_klass),
 217              "Forwardee klass pointer must go to metaspace");
      check(ShenandoahAsserts::_safe_oop, obj, obj_klass == fwd_klass,
             "Forwardee klass must match the object klass");
 220 
 221       fwd_reg = _heap->heap_region_containing(fwd);
 222 
 223       // Verify that forwardee is not in the dead space:
 224       check(ShenandoahAsserts::_safe_oop, obj, !fwd_reg->is_humongous(),
 225              "Should have no humongous forwardees");
 226 
 227       HeapWord *fwd_addr = cast_from_oop<HeapWord *>(fwd);
 228       check(ShenandoahAsserts::_safe_oop, obj, fwd_addr < fwd_reg->top(),
 229              "Forwardee start should be within the region");
 230       check(ShenandoahAsserts::_safe_oop, obj, (fwd_addr + fwd->size()) <= fwd_reg->top(),
 231              "Forwardee end should be within the region");
 232 
 233       oop fwd2 = ShenandoahForwarding::get_forwardee_raw_unchecked(fwd);
 234       check(ShenandoahAsserts::_safe_oop, obj, (fwd == fwd2),
 235              "Double forwarding");
 236     } else {
 237       fwd_reg = obj_reg;
 238     }
 239 
 240     // ------------ obj and fwd are safe at this point --------------
 241     // We allow for marked or old here for two reasons:
 242     //  1. If this is a young collect, old objects wouldn't be marked. We've
    //     recently changed the verifier traversal to only follow young objects
 244     //     during a young collect so this _shouldn't_ be necessary.
 245     //  2. At present, we do not clear dead objects from the remembered set.
 246     //     Everything in the remembered set is old (ipso facto), so allowing for
 247     //     'marked_or_old' covers the case of stale objects in rset.
 248     // TODO: Just use 'is_marked' here.
 249     switch (_options._verify_marked) {
 250       case ShenandoahVerifier::_verify_marked_disable:
 251         // skip
 252         break;
 253       case ShenandoahVerifier::_verify_marked_incomplete:
 254         check(ShenandoahAsserts::_safe_all, obj, _heap->marking_context()->is_marked_or_old(obj),
 255                "Must be marked in incomplete bitmap");
 256         break;
 257       case ShenandoahVerifier::_verify_marked_complete:
 258         check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked_or_old(obj),
 259                "Must be marked in complete bitmap");
 260         break;
 261       case ShenandoahVerifier::_verify_marked_complete_except_references:
 262       case ShenandoahVerifier::_verify_marked_complete_satb_empty:
 263         check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked_or_old(obj),
 264               "Must be marked in complete bitmap, except j.l.r.Reference referents");
 265         break;
 266       default:
 267         assert(false, "Unhandled mark verification");
 268     }
 269 
 270     switch (_options._verify_forwarded) {
 271       case ShenandoahVerifier::_verify_forwarded_disable:
 272         // skip
 273         break;
 274       case ShenandoahVerifier::_verify_forwarded_none: {
 275         check(ShenandoahAsserts::_safe_all, obj, (obj == fwd),
 276                "Should not be forwarded");
 277         break;
 278       }
 279       case ShenandoahVerifier::_verify_forwarded_allow: {
 280         if (obj != fwd) {
 281           check(ShenandoahAsserts::_safe_all, obj, obj_reg != fwd_reg,
 282                  "Forwardee should be in another region");
 283         }
 284         break;
 285       }
 286       default:
 287         assert(false, "Unhandled forwarding verification");
 288     }
 289 
 290     switch (_options._verify_cset) {
 291       case ShenandoahVerifier::_verify_cset_disable:
 292         // skip
 293         break;
 294       case ShenandoahVerifier::_verify_cset_none:
 295         check(ShenandoahAsserts::_safe_all, obj, !_heap->in_collection_set(obj),
 296                "Should not have references to collection set");
 297         break;
 298       case ShenandoahVerifier::_verify_cset_forwarded:
 299         if (_heap->in_collection_set(obj)) {
 300           check(ShenandoahAsserts::_safe_all, obj, (obj != fwd),
 301                  "Object in collection set, should have forwardee");
 302         }
 303         break;
 304       default:
 305         assert(false, "Unhandled cset verification");
 306     }
 307 
 308   }
 309 
 310 public:
 311   /**
 312    * Verify object with known interior reference.
 313    * @param p interior reference where the object is referenced from; can be off-heap
 314    * @param obj verified object
 315    */
 316   template <class T>
 317   void verify_oop_at(T* p, oop obj) {
 318     _interior_loc = p;
 319     verify_oop(obj);
 320     _interior_loc = nullptr;
 321   }
 322 
 323   /**
 324    * Verify object without known interior reference.
 325    * Useful when picking up the object at known offset in heap,
 326    * but without knowing what objects reference it.
 327    * @param obj verified object
 328    */
 329   void verify_oop_standalone(oop obj) {
 330     _interior_loc = nullptr;
 331     verify_oop(obj);
 332     _interior_loc = nullptr;
 333   }
 334 
 335   /**
 336    * Verify oop fields from this object.
 337    * @param obj host object for verified fields
 338    */
 339   void verify_oops_from(oop obj) {
 340     _loc = obj;
 341     obj->oop_iterate(this);
 342     _loc = nullptr;
 343   }
 344 
 345   void do_oop(oop* p) override { do_oop_work(p); }
 346   void do_oop(narrowOop* p) override { do_oop_work(p); }
 347 };
 348 
 349 // This closure computes the amounts of used, committed, and garbage memory and the number of regions contained within
 350 // a subset (e.g. the young generation or old generation) of the total heap.
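// Typical use (a sketch of what verify_at_safepoint() below does): iterate all regions
// and compare the accumulated totals against the heap's own accounting:
//
//   ShenandoahCalculateRegionStatsClosure cl;
//   ShenandoahHeap::heap()->heap_region_iterate(&cl);
//   // cl.used(), cl.committed() and cl.garbage() now hold the per-region sums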
 351 class ShenandoahCalculateRegionStatsClosure : public ShenandoahHeapRegionClosure {
 352 private:
 353   size_t _used, _committed, _garbage, _regions, _humongous_waste;
 354 public:
 355   ShenandoahCalculateRegionStatsClosure() : _used(0), _committed(0), _garbage(0), _regions(0), _humongous_waste(0) {};
 356 
 357   void heap_region_do(ShenandoahHeapRegion* r) override {
 358     _used += r->used();
 359     _garbage += r->garbage();
 360     _committed += r->is_committed() ? ShenandoahHeapRegion::region_size_bytes() : 0;
 361     if (r->is_humongous()) {
 362       _humongous_waste += r->free();
 363     }
 364     _regions++;
 365     log_debug(gc)("ShenandoahCalculateRegionStatsClosure: adding " SIZE_FORMAT " for %s Region " SIZE_FORMAT ", yielding: " SIZE_FORMAT,
 366             r->used(), (r->is_humongous() ? "humongous" : "regular"), r->index(), _used);
 367   }
 368 
 369   size_t used() const { return _used; }
 370   size_t committed() const { return _committed; }
 371   size_t garbage() const { return _garbage; }
 372   size_t regions() const { return _regions; }
 373   size_t waste() const { return _humongous_waste; }
 374 
  // span is the total memory affiliated with these stats (some of it in use, the rest still available)
 376   size_t span() const { return _regions * ShenandoahHeapRegion::region_size_bytes(); }
 377 };
 378 
 379 class ShenandoahGenerationStatsClosure : public ShenandoahHeapRegionClosure {
 380  public:
 381   ShenandoahCalculateRegionStatsClosure old;
 382   ShenandoahCalculateRegionStatsClosure young;
 383   ShenandoahCalculateRegionStatsClosure global;
 384 
 385   void heap_region_do(ShenandoahHeapRegion* r) override {
 386     switch (r->affiliation()) {
 387       case FREE:
 388         return;
 389       case YOUNG_GENERATION:
 390         young.heap_region_do(r);
 391         global.heap_region_do(r);
 392         break;
 393       case OLD_GENERATION:
 394         old.heap_region_do(r);
 395         global.heap_region_do(r);
 396         break;
 397       default:
 398         ShouldNotReachHere();
 399     }
 400   }
 401 
 402   static void log_usage(ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
 403     log_debug(gc)("Safepoint verification: %s verified usage: " SIZE_FORMAT "%s, recorded usage: " SIZE_FORMAT "%s",
 404                   generation->name(),
 405                   byte_size_in_proper_unit(generation->used()), proper_unit_for_byte_size(generation->used()),
 406                   byte_size_in_proper_unit(stats.used()),       proper_unit_for_byte_size(stats.used()));
 407   }
 408 
 409   static void validate_usage(const bool adjust_for_padding,
 410                              const char* label, ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
 411     ShenandoahHeap* heap = ShenandoahHeap::heap();
 412     size_t generation_used = generation->used();
 413     size_t generation_used_regions = generation->used_regions();
 414     if (adjust_for_padding && (generation->is_young() || generation->is_global())) {
 415       size_t pad = heap->old_generation()->get_pad_for_promote_in_place();
 416       generation_used += pad;
 417     }
 418 
 419     guarantee(stats.used() == generation_used,
 420               "%s: generation (%s) used size must be consistent: generation-used: " PROPERFMT ", regions-used: " PROPERFMT,
 421               label, generation->name(), PROPERFMTARGS(generation_used), PROPERFMTARGS(stats.used()));
 422 
 423     guarantee(stats.regions() == generation_used_regions,
 424               "%s: generation (%s) used regions (" SIZE_FORMAT ") must equal regions that are in use (" SIZE_FORMAT ")",
 425               label, generation->name(), generation->used_regions(), stats.regions());
 426 
 427     size_t generation_capacity = generation->max_capacity();
    guarantee(stats.span() <= generation_capacity,
              "%s: generation (%s) size spanned by regions (" PROPERFMT ") must not exceed current capacity (" PROPERFMT ")",
              label, generation->name(), PROPERFMTARGS(stats.span()), PROPERFMTARGS(generation_capacity));
 431 
 432     size_t humongous_waste = generation->get_humongous_waste();
 433     guarantee(stats.waste() == humongous_waste,
 434               "%s: generation (%s) humongous waste must be consistent: generation: " PROPERFMT ", regions: " PROPERFMT,
 435               label, generation->name(), PROPERFMTARGS(humongous_waste), PROPERFMTARGS(stats.waste()));
 436   }
 437 };
 438 
 439 class ShenandoahVerifyHeapRegionClosure : public ShenandoahHeapRegionClosure {
 440 private:
 441   ShenandoahHeap* _heap;
 442   const char* _phase;
 443   ShenandoahVerifier::VerifyRegions _regions;
 444 public:
 445   ShenandoahVerifyHeapRegionClosure(const char* phase, ShenandoahVerifier::VerifyRegions regions) :
 446     _heap(ShenandoahHeap::heap()),
 447     _phase(phase),
 448     _regions(regions) {};
 449 
 450   void print_failure(ShenandoahHeapRegion* r, const char* label) {
 451     ResourceMark rm;
 452 
 453     ShenandoahMessageBuffer msg("Shenandoah verification failed; %s: %s\n\n", _phase, label);
 454 
 455     stringStream ss;
 456     r->print_on(&ss);
 457     msg.append("%s", ss.as_string());
 458 
 459     report_vm_error(__FILE__, __LINE__, msg.buffer());
 460   }
 461 
 462   void verify(ShenandoahHeapRegion* r, bool test, const char* msg) {
 463     if (!test) {
 464       print_failure(r, msg);
 465     }
 466   }
 467 
 468   void heap_region_do(ShenandoahHeapRegion* r) override {
 469     switch (_regions) {
 470       case ShenandoahVerifier::_verify_regions_disable:
 471         break;
 472       case ShenandoahVerifier::_verify_regions_notrash:
 473         verify(r, !r->is_trash(),
 474                "Should not have trash regions");
 475         break;
 476       case ShenandoahVerifier::_verify_regions_nocset:
 477         verify(r, !r->is_cset(),
 478                "Should not have cset regions");
 479         break;
 480       case ShenandoahVerifier::_verify_regions_notrash_nocset:
 481         verify(r, !r->is_trash(),
 482                "Should not have trash regions");
 483         verify(r, !r->is_cset(),
 484                "Should not have cset regions");
 485         break;
 486       default:
 487         ShouldNotReachHere();
 488     }
 489 
 490     verify(r, r->capacity() == ShenandoahHeapRegion::region_size_bytes(),
 491            "Capacity should match region size");
 492 
 493     verify(r, r->bottom() <= r->top(),
 494            "Region top should not be less than bottom");
 495 
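    // TAMS (top-at-mark-start) invariant for every region: bottom() <= TAMS <= top().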
 496     verify(r, r->bottom() <= _heap->marking_context()->top_at_mark_start(r),
 497            "Region TAMS should not be less than bottom");
 498 
 499     verify(r, _heap->marking_context()->top_at_mark_start(r) <= r->top(),
 500            "Complete TAMS should not be larger than top");
 501 
 502     verify(r, r->get_live_data_bytes() <= r->capacity(),
 503            "Live data cannot be larger than capacity");
 504 
 505     verify(r, r->garbage() <= r->capacity(),
 506            "Garbage cannot be larger than capacity");
 507 
 508     verify(r, r->used() <= r->capacity(),
 509            "Used cannot be larger than capacity");
 510 
 511     verify(r, r->get_shared_allocs() <= r->capacity(),
 512            "Shared alloc count should not be larger than capacity");
 513 
 514     verify(r, r->get_tlab_allocs() <= r->capacity(),
 515            "TLAB alloc count should not be larger than capacity");
 516 
 517     verify(r, r->get_gclab_allocs() <= r->capacity(),
 518            "GCLAB alloc count should not be larger than capacity");
 519 
 520     verify(r, r->get_plab_allocs() <= r->capacity(),
 521            "PLAB alloc count should not be larger than capacity");
 522 
 523     verify(r, r->get_shared_allocs() + r->get_tlab_allocs() + r->get_gclab_allocs() + r->get_plab_allocs() == r->used(),
 524            "Accurate accounting: shared + TLAB + GCLAB + PLAB = used");
 525 
 526     verify(r, !r->is_empty() || !r->has_live(),
 527            "Empty regions should not have live data");
 528 
 529     verify(r, r->is_cset() == _heap->collection_set()->is_in(r),
 530            "Transitional: region flags and collection set agree");
 531   }
 532 };
 533 
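// Walks everything reachable from the roots, i.e. what the application itself can see,
// marking the verification bitmap along the way. At level 2 only the roots are checked;
// at level 3 and above the whole reachable closure is traversed.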
 534 class ShenandoahVerifierReachableTask : public WorkerTask {
 535 private:
 536   const char* _label;
 537   ShenandoahVerifier::VerifyOptions _options;
 538   ShenandoahHeap* _heap;
 539   ShenandoahLivenessData* _ld;
 540   MarkBitMap* _bitmap;
 541   volatile size_t _processed;
 542 
 543 public:
 544   ShenandoahVerifierReachableTask(MarkBitMap* bitmap,
 545                                   ShenandoahLivenessData* ld,
 546                                   const char* label,
 547                                   ShenandoahVerifier::VerifyOptions options) :
 548     WorkerTask("Shenandoah Verifier Reachable Objects"),
 549     _label(label),
 550     _options(options),
 551     _heap(ShenandoahHeap::heap()),
 552     _ld(ld),
 553     _bitmap(bitmap),
 554     _processed(0) {};
 555 
 556   size_t processed() const {
 557     return _processed;
 558   }
 559 
 560   void work(uint worker_id) override {
 561     ResourceMark rm;
 562     ShenandoahVerifierStack stack;
 563 
    // At level 2, the roots only need to be checked once, so worker 0 does it alone.
    // At level 3, every worker checks the roots and seeds its local stack with them.
    // Accepting multiple root scans at level 3 is the lesser evil, because the extra
    // parallelism during the reachable-heap walk more than pays for them.
 568     if (((ShenandoahVerifyLevel == 2) && (worker_id == 0))
 569         || (ShenandoahVerifyLevel >= 3)) {
 570         ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
 571                                       ShenandoahMessageBuffer("%s, Roots", _label),
 572                                       _options);
 573         if (_heap->unload_classes()) {
 574           ShenandoahRootVerifier::strong_roots_do(&cl);
 575         } else {
 576           ShenandoahRootVerifier::roots_do(&cl);
 577         }
 578     }
 579 
 580     size_t processed = 0;
 581 
 582     if (ShenandoahVerifyLevel >= 3) {
 583       ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
 584                                     ShenandoahMessageBuffer("%s, Reachable", _label),
 585                                     _options);
 586       while (!stack.is_empty()) {
 587         processed++;
 588         ShenandoahVerifierTask task = stack.pop();
 589         cl.verify_oops_from(task.obj());
 590       }
 591     }
 592 
 593     Atomic::add(&_processed, processed, memory_order_relaxed);
 594   }
 595 };
 596 
 597 class ShenandoahVerifyNoIncompleteSatbBuffers : public ThreadClosure {
 598 public:
 599   void do_thread(Thread* thread) override {
 600     SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
 601     if (!queue.is_empty()) {
 602       fatal("All SATB buffers should have been flushed during mark");
 603     }
 604   }
 605 };
 606 
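// Walks all objects marked in the complete marking bitmap, region by region. This covers
// what the collector can see, including marked objects that are no longer reachable.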
 607 class ShenandoahVerifierMarkedRegionTask : public WorkerTask {
 608 private:
 609   const char* _label;
 610   ShenandoahVerifier::VerifyOptions _options;
 611   ShenandoahHeap *_heap;
 612   MarkBitMap* _bitmap;
 613   ShenandoahLivenessData* _ld;
 614   volatile size_t _claimed;
 615   volatile size_t _processed;
 616   ShenandoahGeneration* _generation;
 617 
 618 public:
 619   ShenandoahVerifierMarkedRegionTask(MarkBitMap* bitmap,
 620                                      ShenandoahLivenessData* ld,
 621                                      const char* label,
 622                                      ShenandoahVerifier::VerifyOptions options) :
 623           WorkerTask("Shenandoah Verifier Marked Objects"),
 624           _label(label),
 625           _options(options),
 626           _heap(ShenandoahHeap::heap()),
 627           _bitmap(bitmap),
 628           _ld(ld),
 629           _claimed(0),
 630           _processed(0),
 631           _generation(nullptr) {
 632     if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
 633       Threads::change_thread_claim_token();
 634     }
 635 
 636     if (_heap->mode()->is_generational()) {
 637       _generation = _heap->active_generation();
 638       assert(_generation != nullptr, "Expected active generation in this mode.");
 639     }
 640   };
 641 
 642   size_t processed() {
 643     return Atomic::load(&_processed);
 644   }
 645 
 646   void work(uint worker_id) override {
 647     if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
 648       ShenandoahVerifyNoIncompleteSatbBuffers verify_satb;
 649       Threads::possibly_parallel_threads_do(true, &verify_satb);
 650     }
 651 
 652     ShenandoahVerifierStack stack;
 653     ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
 654                                   ShenandoahMessageBuffer("%s, Marked", _label),
 655                                   _options);
 656 
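    // Claim regions with a simple atomic counter; each worker verifies only the regions
    // it claims, so every region is processed at most once.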
 657     while (true) {
 658       size_t v = Atomic::fetch_then_add(&_claimed, 1u, memory_order_relaxed);
 659       if (v < _heap->num_regions()) {
 660         ShenandoahHeapRegion* r = _heap->get_region(v);
 661         if (!in_generation(r)) {
 662           continue;
 663         }
 664 
 665         if (!r->is_humongous() && !r->is_trash()) {
 666           work_regular(r, stack, cl);
 667         } else if (r->is_humongous_start()) {
 668           work_humongous(r, stack, cl);
 669         }
 670       } else {
 671         break;
 672       }
 673     }
 674   }
 675 
 676   bool in_generation(ShenandoahHeapRegion* r) {
 677     return _generation == nullptr || _generation->contains(r);
 678   }
 679 
 680   virtual void work_humongous(ShenandoahHeapRegion *r, ShenandoahVerifierStack& stack, ShenandoahVerifyOopClosure& cl) {
 681     size_t processed = 0;
 682     HeapWord* obj = r->bottom();
 683     if (_heap->complete_marking_context()->is_marked(cast_to_oop(obj))) {
 684       verify_and_follow(obj, stack, cl, &processed);
 685     }
 686     Atomic::add(&_processed, processed, memory_order_relaxed);
 687   }
 688 
 689   virtual void work_regular(ShenandoahHeapRegion *r, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl) {
 690     size_t processed = 0;
 691     ShenandoahMarkingContext* ctx = _heap->complete_marking_context();
 692     HeapWord* tams = ctx->top_at_mark_start(r);
 693 
    // Before TAMS: only the marking bitmap tells us which objects are live, so walk
    // the marked addresses via the bitmap.
 695     if (tams > r->bottom()) {
 696       HeapWord* start = r->bottom();
 697       HeapWord* addr = ctx->get_next_marked_addr(start, tams);
 698 
 699       while (addr < tams) {
 700         verify_and_follow(addr, stack, cl, &processed);
 701         addr += 1;
 702         if (addr < tams) {
 703           addr = ctx->get_next_marked_addr(addr, tams);
 704         }
 705       }
 706     }
 707 
    // At or above TAMS: objects allocated after mark start are implicitly live and the
    // heap is parsable there, so walk object-by-object using sizes.
 709     {
 710       HeapWord* limit = r->top();
 711       HeapWord* addr = tams;
 712 
 713       while (addr < limit) {
 714         verify_and_follow(addr, stack, cl, &processed);
 715         addr += cast_to_oop(addr)->size();
 716       }
 717     }
 718 
 719     Atomic::add(&_processed, processed, memory_order_relaxed);
 720   }
 721 
 722   void verify_and_follow(HeapWord *addr, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl, size_t *processed) {
 723     if (!_bitmap->par_mark(addr)) return;
 724 
 725     // Verify the object itself:
 726     oop obj = cast_to_oop(addr);
 727     cl.verify_oop_standalone(obj);
 728 
 729     // Verify everything reachable from that object too, hopefully realizing
 730     // everything was already marked, and never touching further:
 731     if (!is_instance_ref_klass(obj->klass())) {
 732       cl.verify_oops_from(obj);
 733       (*processed)++;
 734     }
 735     while (!stack.is_empty()) {
 736       ShenandoahVerifierTask task = stack.pop();
 737       cl.verify_oops_from(task.obj());
 738       (*processed)++;
 739     }
 740   }
 741 };
 742 
 743 class VerifyThreadGCState : public ThreadClosure {
 744 private:
 745   const char* const _label;
 746          char const _expected;
 747 
 748 public:
 749   VerifyThreadGCState(const char* label, char expected) : _label(label), _expected(expected) {}
 750   void do_thread(Thread* t) override {
 751     char actual = ShenandoahThreadLocalData::gc_state(t);
 752     if (!verify_gc_state(actual, _expected)) {
 753       fatal("%s: Thread %s: expected gc-state %d, actual %d", _label, t->name(), _expected, actual);
 754     }
 755   }
 756 
 757   static bool verify_gc_state(char actual, char expected) {
 758     // Old generation marking is allowed in all states.
 759     if (ShenandoahHeap::heap()->mode()->is_generational()) {
 760       return ((actual & ~(ShenandoahHeap::OLD_MARKING | ShenandoahHeap::MARKING)) == expected);
 761     } else {
 762       assert((actual & ShenandoahHeap::OLD_MARKING) == 0, "Should not mark old in non-generational mode");
 763       return (actual == expected);
 764     }
 765   }
 766 };
 767 
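// Central safepoint-time verification driver. The Verify* arguments select which
// invariants to check; ShenandoahVerifyLevel controls how much of the heap is walked.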
 768 void ShenandoahVerifier::verify_at_safepoint(const char* label,
 769                                              VerifyRememberedSet remembered,
 770                                              VerifyForwarded forwarded, VerifyMarked marked,
 771                                              VerifyCollectionSet cset,
 772                                              VerifyLiveness liveness, VerifyRegions regions,
 773                                              VerifySize sizeness,
 774                                              VerifyGCState gcstate) {
 775   guarantee(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "only when nothing else happens");
 776   guarantee(ShenandoahVerify, "only when enabled, and bitmap is initialized in ShenandoahHeap::initialize");
 777 
 778   ShenandoahHeap::heap()->propagate_gc_state_to_java_threads();
 779 
 780   // Avoid side-effect of changing workers' active thread count, but bypass concurrent/parallel protocol check
 781   ShenandoahPushWorkerScope verify_worker_scope(_heap->workers(), _heap->max_workers(), false /*bypass check*/);
 782 
 783   log_info(gc,start)("Verify %s, Level " INTX_FORMAT, label, ShenandoahVerifyLevel);
 784 
 785   // GC state checks
 786   {
 787     char expected = -1;
 788     bool enabled;
 789     switch (gcstate) {
 790       case _verify_gcstate_disable:
 791         enabled = false;
 792         break;
 793       case _verify_gcstate_forwarded:
 794         enabled = true;
 795         expected = ShenandoahHeap::HAS_FORWARDED;
 796         break;
 797       case _verify_gcstate_evacuation:
 798         enabled = true;
 799         expected = ShenandoahHeap::EVACUATION;
 800         if (!_heap->is_stw_gc_in_progress()) {
 801           // Only concurrent GC sets this.
 802           expected |= ShenandoahHeap::WEAK_ROOTS;
 803         }
 804         break;
 805       case _verify_gcstate_updating:
 806         enabled = true;
 807         expected = ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::UPDATEREFS;
 808         break;
 809       case _verify_gcstate_stable:
 810         enabled = true;
 811         expected = ShenandoahHeap::STABLE;
 812         break;
 813       case _verify_gcstate_stable_weakroots:
 814         enabled = true;
 815         expected = ShenandoahHeap::STABLE;
 816         if (!_heap->is_stw_gc_in_progress()) {
 817           // Only concurrent GC sets this.
 818           expected |= ShenandoahHeap::WEAK_ROOTS;
 819         }
 820         break;
 821       default:
 822         enabled = false;
 823         assert(false, "Unhandled gc-state verification");
 824     }
 825 
 826     if (enabled) {
 827       char actual = _heap->gc_state();
 828 
 829       bool is_marking = (actual & ShenandoahHeap::MARKING);
 830       bool is_marking_young_or_old = (actual & (ShenandoahHeap::YOUNG_MARKING | ShenandoahHeap::OLD_MARKING));
 831       assert(is_marking == is_marking_young_or_old, "MARKING iff (YOUNG_MARKING or OLD_MARKING), gc_state is: %x", actual);
 832 
 833       // Old generation marking is allowed in all states.
 834       if (!VerifyThreadGCState::verify_gc_state(actual, expected)) {
 835         fatal("%s: Global gc-state: expected %d, actual %d", label, expected, actual);
 836       }
 837 
 838       VerifyThreadGCState vtgcs(label, expected);
 839       Threads::java_threads_do(&vtgcs);
 840     }
 841   }
 842 
 843   // Deactivate barriers temporarily: Verifier wants plain heap accesses
 844   ShenandoahGCStateResetter resetter;
 845 
 846   // Heap size checks
 847   {
 848     ShenandoahHeapLocker lock(_heap->lock());
 849 
 850     ShenandoahCalculateRegionStatsClosure cl;
 851     _heap->heap_region_iterate(&cl);
    size_t heap_used = 0;
 853     if (_heap->mode()->is_generational() && (sizeness == _verify_size_adjusted_for_padding)) {
 854       // Prior to evacuation, regular regions that are to be evacuated in place are padded to prevent further allocations
 855       heap_used = _heap->used() + _heap->old_generation()->get_pad_for_promote_in_place();
 856     } else if (sizeness != _verify_size_disable) {
 857       heap_used = _heap->used();
 858     }
 859     if (sizeness != _verify_size_disable) {
 860       guarantee(cl.used() == heap_used,
 861                 "%s: heap used size must be consistent: heap-used = " SIZE_FORMAT "%s, regions-used = " SIZE_FORMAT "%s",
 862                 label,
 863                 byte_size_in_proper_unit(heap_used), proper_unit_for_byte_size(heap_used),
 864                 byte_size_in_proper_unit(cl.used()), proper_unit_for_byte_size(cl.used()));
 865     }
 866     size_t heap_committed = _heap->committed();
 867     guarantee(cl.committed() == heap_committed,
 868               "%s: heap committed size must be consistent: heap-committed = " SIZE_FORMAT "%s, regions-committed = " SIZE_FORMAT "%s",
 869               label,
 870               byte_size_in_proper_unit(heap_committed), proper_unit_for_byte_size(heap_committed),
 871               byte_size_in_proper_unit(cl.committed()), proper_unit_for_byte_size(cl.committed()));
 872   }
 873 
 874   log_debug(gc)("Safepoint verification finished heap usage verification");
 875 
 876   ShenandoahGeneration* generation;
 877   if (_heap->mode()->is_generational()) {
 878     generation = _heap->active_generation();
 879     guarantee(generation != nullptr, "Need to know which generation to verify.");
 880   } else {
 881     generation = nullptr;
 882   }
 883 
 884   if (generation != nullptr) {
 885     ShenandoahHeapLocker lock(_heap->lock());
 886 
 887     switch (remembered) {
 888       case _verify_remembered_disable:
 889         break;
 890       case _verify_remembered_before_marking:
 891         log_debug(gc)("Safepoint verification of remembered set at mark");
 892         verify_rem_set_before_mark();
 893         break;
 894       case _verify_remembered_before_updating_references:
 895         log_debug(gc)("Safepoint verification of remembered set at update ref");
 896         verify_rem_set_before_update_ref();
 897         break;
 898       case _verify_remembered_after_full_gc:
 899         log_debug(gc)("Safepoint verification of remembered set after full gc");
 900         verify_rem_set_after_full_gc();
 901         break;
 902       default:
 903         fatal("Unhandled remembered set verification mode");
 904     }
 905 
 906     ShenandoahGenerationStatsClosure cl;
 907     _heap->heap_region_iterate(&cl);
 908 
 909     if (LogTarget(Debug, gc)::is_enabled()) {
 910       ShenandoahGenerationStatsClosure::log_usage(_heap->old_generation(),    cl.old);
 911       ShenandoahGenerationStatsClosure::log_usage(_heap->young_generation(),  cl.young);
 912       ShenandoahGenerationStatsClosure::log_usage(_heap->global_generation(), cl.global);
 913     }
 914     if (sizeness == _verify_size_adjusted_for_padding) {
 915       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
 916       ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->young_generation(), cl.young);
 917       ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->global_generation(), cl.global);
 918     } else if (sizeness == _verify_size_exact) {
 919       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
 920       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->young_generation(), cl.young);
 921       ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->global_generation(), cl.global);
 922     }
 923     // else: sizeness must equal _verify_size_disable
 924   }
 925 
 926   log_debug(gc)("Safepoint verification finished remembered set verification");
 927 
 928   // Internal heap region checks
 929   if (ShenandoahVerifyLevel >= 1) {
 930     ShenandoahVerifyHeapRegionClosure cl(label, regions);
 931     if (generation != nullptr) {
 932       generation->heap_region_iterate(&cl);
 933     } else {
 934       _heap->heap_region_iterate(&cl);
 935     }
 936   }
 937 
 938   log_debug(gc)("Safepoint verification finished heap region closure verification");
 939 
 940   OrderAccess::fence();
 941 
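  // Make TLABs/GCLABs parsable so that the size-based object walks below do not
  // stumble over unfilled lab remainders.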
 942   if (UseTLAB) {
 943     _heap->labs_make_parsable();
 944   }
 945 
 946   // Allocate temporary bitmap for storing marking wavefront:
 947   _verification_bit_map->clear();
 948 
 949   // Allocate temporary array for storing liveness data
 950   ShenandoahLivenessData* ld = NEW_C_HEAP_ARRAY(ShenandoahLivenessData, _heap->num_regions(), mtGC);
 951   Copy::fill_to_bytes((void*)ld, _heap->num_regions()*sizeof(ShenandoahLivenessData), 0);
 952 
 953   const VerifyOptions& options = ShenandoahVerifier::VerifyOptions(forwarded, marked, cset, liveness, regions, gcstate);
 954 
 955   // Steps 1-2. Scan root set to get initial reachable set. Finish walking the reachable heap.
 956   // This verifies what application can see, since it only cares about reachable objects.
 957   size_t count_reachable = 0;
 958   if (ShenandoahVerifyLevel >= 2) {
 959     ShenandoahVerifierReachableTask task(_verification_bit_map, ld, label, options);
 960     _heap->workers()->run_task(&task);
 961     count_reachable = task.processed();
 962   }
 963 
 964   log_debug(gc)("Safepoint verification finished getting initial reachable set");
 965 
 966   // Step 3. Walk marked objects. Marked objects might be unreachable. This verifies what collector,
 967   // not the application, can see during the region scans. There is no reason to process the objects
 968   // that were already verified, e.g. those marked in verification bitmap. There is interaction with TAMS:
 969   // before TAMS, we verify the bitmaps, if available; after TAMS, we walk until the top(). It mimics
 970   // what marked_object_iterate is doing, without calling into that optimized (and possibly incorrect)
 971   // version
 972 
 973   size_t count_marked = 0;
 974   if (ShenandoahVerifyLevel >= 4 &&
 975         (marked == _verify_marked_complete ||
 976          marked == _verify_marked_complete_except_references ||
 977          marked == _verify_marked_complete_satb_empty)) {
 978     guarantee(_heap->marking_context()->is_complete(), "Marking context should be complete");
 979     ShenandoahVerifierMarkedRegionTask task(_verification_bit_map, ld, label, options);
 980     _heap->workers()->run_task(&task);
 981     count_marked = task.processed();
 982   } else {
 983     guarantee(ShenandoahVerifyLevel < 4 || marked == _verify_marked_incomplete || marked == _verify_marked_disable, "Should be");
 984   }
 985 
 986   log_debug(gc)("Safepoint verification finished walking marked objects");
 987 
 988   // Step 4. Verify accumulated liveness data, if needed. Only reliable if verification level includes
 989   // marked objects.
 990 
 991   if (ShenandoahVerifyLevel >= 4 && marked == _verify_marked_complete && liveness == _verify_liveness_complete) {
 992     for (size_t i = 0; i < _heap->num_regions(); i++) {
 993       ShenandoahHeapRegion* r = _heap->get_region(i);
 994       if (generation != nullptr && !generation->contains(r)) {
 995         continue;
 996       }
 997 
 998       juint verf_live = 0;
 999       if (r->is_humongous()) {
1000         // For humongous objects, test if start region is marked live, and if so,
1001         // all humongous regions in that chain have live data equal to their "used".
1002         juint start_live = Atomic::load(&ld[r->humongous_start_region()->index()]);
1003         if (start_live > 0) {
1004           verf_live = (juint)(r->used() / HeapWordSize);
1005         }
1006       } else {
1007         verf_live = Atomic::load(&ld[r->index()]);
1008       }
1009 
1010       size_t reg_live = r->get_live_data_words();
1011       if (reg_live != verf_live) {
1012         stringStream ss;
1013         r->print_on(&ss);
1014         fatal("%s: Live data should match: region-live = " SIZE_FORMAT ", verifier-live = " UINT32_FORMAT "\n%s",
1015               label, reg_live, verf_live, ss.freeze());
1016       }
1017     }
1018   }
1019 
1020   log_debug(gc)("Safepoint verification finished accumulation of liveness data");
1021 
1022 
1023   log_info(gc)("Verify %s, Level " INTX_FORMAT " (" SIZE_FORMAT " reachable, " SIZE_FORMAT " marked)",
1024                label, ShenandoahVerifyLevel, count_reachable, count_marked);
1025 
1026   FREE_C_HEAP_ARRAY(ShenandoahLivenessData, ld);
1027 }
1028 
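// The entry points below are thin wrappers that pick the verification options
// appropriate for a particular point in the GC cycle.
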
1029 void ShenandoahVerifier::verify_generic(VerifyOption vo) {
1030   verify_at_safepoint(
1031           "Generic Verification",
1032           _verify_remembered_disable,  // do not verify remembered set
1033           _verify_forwarded_allow,     // conservatively allow forwarded
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
1035           _verify_cset_disable,        // cset may be inconsistent
1036           _verify_liveness_disable,    // no reliable liveness data
1037           _verify_regions_disable,     // no reliable region data
1038           _verify_size_exact,          // expect generation and heap sizes to match exactly
1039           _verify_gcstate_disable      // no data about gcstate
1040   );
1041 }
1042 
1043 void ShenandoahVerifier::verify_before_concmark() {
1044     verify_at_safepoint(
1045           "Before Mark",
1046           _verify_remembered_before_marking,
1047                                        // verify read-only remembered set from bottom() to top()
1048           _verify_forwarded_none,      // UR should have fixed up
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
1050           _verify_cset_none,           // UR should have fixed this
1051           _verify_liveness_disable,    // no reliable liveness data
1052           _verify_regions_notrash,     // no trash regions
1053           _verify_size_exact,          // expect generation and heap sizes to match exactly
1054           _verify_gcstate_stable       // there are no forwarded objects
1055   );
1056 }
1057 
1058 void ShenandoahVerifier::verify_after_concmark() {
1059   verify_at_safepoint(
1060           "After Mark",
1061           _verify_remembered_disable,  // do not verify remembered set
1062           _verify_forwarded_none,      // no forwarded references
1063           _verify_marked_complete_satb_empty,
1064                                        // bitmaps as precise as we can get, except dangling j.l.r.Refs
1065           _verify_cset_none,           // no references to cset anymore
1066           _verify_liveness_complete,   // liveness data must be complete here
1067           _verify_regions_disable,     // trash regions not yet recycled
1068           _verify_size_exact,          // expect generation and heap sizes to match exactly
1069           _verify_gcstate_stable_weakroots  // heap is still stable, weakroots are in progress
1070   );
1071 }
1072 
1073 void ShenandoahVerifier::verify_before_evacuation() {
1074   verify_at_safepoint(
1075           "Before Evacuation",
1076           _verify_remembered_disable,                // do not verify remembered set
1077           _verify_forwarded_none,                    // no forwarded references
1078           _verify_marked_complete_except_references, // walk over marked objects too
1079           _verify_cset_disable,                      // non-forwarded references to cset expected
1080           _verify_liveness_complete,                 // liveness data must be complete here
1081           _verify_regions_disable,                   // trash regions not yet recycled
1082           _verify_size_adjusted_for_padding,         // expect generation and heap sizes to match after adjustments
1083                                                      //  for promote in place padding
1084           _verify_gcstate_stable_weakroots           // heap is still stable, weakroots are in progress
1085   );
1086 }
1087 
1088 void ShenandoahVerifier::verify_during_evacuation() {
1089   verify_at_safepoint(
1090           "During Evacuation",
1091           _verify_remembered_disable, // do not verify remembered set
1092           _verify_forwarded_allow,    // some forwarded references are allowed
1093           _verify_marked_disable,     // walk only roots
1094           _verify_cset_disable,       // some cset references are not forwarded yet
1095           _verify_liveness_disable,   // liveness data might be already stale after pre-evacs
1096           _verify_regions_disable,    // trash regions not yet recycled
1097           _verify_size_disable,       // we don't know how much of promote-in-place work has been completed
1098           _verify_gcstate_evacuation  // evacuation is in progress
1099   );
1100 }
1101 
1102 void ShenandoahVerifier::verify_after_evacuation() {
1103   verify_at_safepoint(
1104           "After Evacuation",
1105           _verify_remembered_disable,  // do not verify remembered set
1106           _verify_forwarded_allow,     // objects are still forwarded
1107           _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
1108           _verify_cset_forwarded,      // all cset refs are fully forwarded
1109           _verify_liveness_disable,    // no reliable liveness data anymore
1110           _verify_regions_notrash,     // trash regions have been recycled already
1111           _verify_size_exact,          // expect generation and heap sizes to match exactly
1112           _verify_gcstate_forwarded    // evacuation produced some forwarded objects
1113   );
1114 }
1115 
1116 void ShenandoahVerifier::verify_before_updaterefs() {
1117   verify_at_safepoint(
1118           "Before Updating References",
1119           _verify_remembered_before_updating_references,  // verify read-write remembered set
1120           _verify_forwarded_allow,     // forwarded references allowed
1121           _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
1122           _verify_cset_forwarded,      // all cset refs are fully forwarded
1123           _verify_liveness_disable,    // no reliable liveness data anymore
1124           _verify_regions_notrash,     // trash regions have been recycled already
1125           _verify_size_exact,          // expect generation and heap sizes to match exactly
1126           _verify_gcstate_updating     // evacuation should have produced some forwarded objects
1127   );
1128 }
1129 
// We have not yet cleaned up (reclaimed) the collection set
1131 void ShenandoahVerifier::verify_after_updaterefs() {
1132   verify_at_safepoint(
1133           "After Updating References",
1134           _verify_remembered_disable,  // do not verify remembered set
1135           _verify_forwarded_none,      // no forwarded references
1136           _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
1137           _verify_cset_none,           // no cset references, all updated
1138           _verify_liveness_disable,    // no reliable liveness data anymore
1139           _verify_regions_nocset,      // no cset regions, trash regions have appeared
1140           _verify_size_exact,          // expect generation and heap sizes to match exactly
1141           _verify_gcstate_stable       // update refs had cleaned up forwarded objects
1142   );
1143 }
1144 
1145 void ShenandoahVerifier::verify_after_degenerated() {
1146   verify_at_safepoint(
1147           "After Degenerated GC",
1148           _verify_remembered_disable,  // do not verify remembered set
1149           _verify_forwarded_none,      // all objects are non-forwarded
1150           _verify_marked_complete,     // all objects are marked in complete bitmap
1151           _verify_cset_none,           // no cset references
1152           _verify_liveness_disable,    // no reliable liveness data anymore
1153           _verify_regions_notrash_nocset, // no trash, no cset
1154           _verify_size_exact,          // expect generation and heap sizes to match exactly
1155           _verify_gcstate_stable       // degenerated refs had cleaned up forwarded objects
1156   );
1157 }
1158 
1159 void ShenandoahVerifier::verify_before_fullgc() {
1160   verify_at_safepoint(
1161           "Before Full GC",
1162           _verify_remembered_disable,  // do not verify remembered set
1163           _verify_forwarded_allow,     // can have forwarded objects
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
1165           _verify_cset_disable,        // cset might be foobared
1166           _verify_liveness_disable,    // no reliable liveness data anymore
1167           _verify_regions_disable,     // no reliable region data here
1168           _verify_size_disable,        // if we degenerate during evacuation, usage not valid: padding and deferred accounting
1169           _verify_gcstate_disable      // no reliable gcstate data
1170   );
1171 }
1172 
1173 void ShenandoahVerifier::verify_after_fullgc() {
1174   verify_at_safepoint(
1175           "After Full GC",
1176           _verify_remembered_after_full_gc,  // verify read-write remembered set
1177           _verify_forwarded_none,      // all objects are non-forwarded
1178           _verify_marked_complete,     // all objects are marked in complete bitmap
1179           _verify_cset_none,           // no cset references
1180           _verify_liveness_disable,    // no reliable liveness data anymore
1181           _verify_regions_notrash_nocset, // no trash, no cset
1182           _verify_size_exact,           // expect generation and heap sizes to match exactly
1183           _verify_gcstate_stable        // full gc cleaned up everything
1184   );
1185 }
1186 
// TODO: Why does this closure not visit metadata?
class ShenandoahVerifyNoForwarded : public BasicOopIterateClosure {
1189 private:
1190   template <class T>
1191   void do_oop_work(T* p) {
1192     T o = RawAccess<>::oop_load(p);
1193     if (!CompressedOops::is_null(o)) {
1194       oop obj = CompressedOops::decode_not_null(o);
1195       oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
1196       if (obj != fwd) {
1197         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1198                                          "Verify Roots", "Should not be forwarded", __FILE__, __LINE__);
1199       }
1200     }
1201   }
1202 
1203 public:
  void do_oop(narrowOop* p) override { do_oop_work(p); }
  void do_oop(oop* p)       override { do_oop_work(p); }
1206 };
1207 
// TODO: Why does this closure not visit metadata?
1209 class ShenandoahVerifyInToSpaceClosure : public BasicOopIterateClosure {
1210 private:
1211   template <class T>
1212   void do_oop_work(T* p) {
1213     T o = RawAccess<>::oop_load(p);
1214     if (!CompressedOops::is_null(o)) {
1215       oop obj = CompressedOops::decode_not_null(o);
1216       ShenandoahHeap* heap = ShenandoahHeap::heap();
1217 
1218       if (!heap->marking_context()->is_marked_or_old(obj)) {
1219         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1220                 "Verify Roots In To-Space", "Should be marked", __FILE__, __LINE__);
1221       }
1222 
1223       if (heap->in_collection_set(obj)) {
1224         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1225                 "Verify Roots In To-Space", "Should not be in collection set", __FILE__, __LINE__);
1226       }
1227 
1228       oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
1229       if (obj != fwd) {
1230         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1231                 "Verify Roots In To-Space", "Should not be forwarded", __FILE__, __LINE__);
1232       }
1233     }
1234   }
1235 
1236 public:
1237   void do_oop(narrowOop* p) override { do_oop_work(p); }
1238   void do_oop(oop* p)       override { do_oop_work(p); }
1239 };
1240 
1241 void ShenandoahVerifier::verify_roots_in_to_space() {
1242   ShenandoahVerifyInToSpaceClosure cl;
1243   ShenandoahRootVerifier::roots_do(&cl);
1244 }
1245 
1246 void ShenandoahVerifier::verify_roots_no_forwarded() {
1247   ShenandoahVerifyNoForwarded cl;
1248   ShenandoahRootVerifier::roots_do(&cl);
1249 }
1250 
1251 class ShenandoahVerifyRemSetClosure : public BasicOopIterateClosure {
1252 protected:
1253   bool                        const _init_mark;
1254   ShenandoahGenerationalHeap* const _heap;
1255   RememberedScanner*          const _scanner;
1256 
1257 public:
1258   // Argument distinguishes between verification at initial mark and at the start of update refs.
1259   explicit ShenandoahVerifyRemSetClosure(bool init_mark) :
1260             _init_mark(init_mark),
1261             _heap(ShenandoahGenerationalHeap::heap()),
1262             _scanner(_heap->old_generation()->card_scan()) {}
1263 
1264   template<class T>
1265   inline void work(T* p) {
1266     T o = RawAccess<>::oop_load(p);
1267     if (!CompressedOops::is_null(o)) {
1268       oop obj = CompressedOops::decode_not_null(o);
1269       if (_heap->is_in_young(obj)) {
1270         size_t card_index = _scanner->card_index_for_addr((HeapWord*) p);
1271         if (_init_mark && !_scanner->is_card_dirty(card_index)) {
1272           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1273                                            "Verify init-mark remembered set violation", "clean card should be dirty", __FILE__, __LINE__);
1274         } else if (!_init_mark && !_scanner->is_write_card_dirty(card_index)) {
1275           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1276                                            "Verify init-update-refs remembered set violation", "clean card should be dirty", __FILE__, __LINE__);
1277         }
1278       }
1279     }
1280   }
1281 
1282   void do_oop(narrowOop* p) override { work(p); }
1283   void do_oop(oop* p)       override { work(p); }
1284 };
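// A minimal usage sketch (illustrative only, not invoked anywhere): the same closure type serves
// both verification points and differs only in which card table it consults.
//
//   ShenandoahVerifyRemSetClosure at_init_mark(true);     // consults the read card table (is_card_dirty)
//   ShenandoahVerifyRemSetClosure at_update_refs(false);  // consults the write card table (is_write_card_dirty)
//   obj->oop_iterate(&at_init_mark);                      // reports clean cards spanning pointers into young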
1285 
1286 void ShenandoahVerifier::help_verify_region_rem_set(ShenandoahHeapRegion* r, ShenandoahMarkingContext* ctx, HeapWord* from,
1287                                                     HeapWord* top, HeapWord* registration_watermark, const char* message) {
1288   RememberedScanner* scanner = ShenandoahGenerationalHeap::heap()->old_generation()->card_scan();
1289   ShenandoahVerifyRemSetClosure check_interesting_pointers(false);
1290 
1291   HeapWord* obj_addr = from;
1292   if (r->is_humongous_start()) {
1293     oop obj = cast_to_oop(obj_addr);
1294     if ((ctx == nullptr) || ctx->is_marked(obj)) {
1295       size_t card_index = scanner->card_index_for_addr(obj_addr);
1296       // For humongous objects, the typical object is an array, so the following checks may be overkill
1297       // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1298       // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1299       if (!scanner->is_write_card_dirty(card_index) || obj->is_objArray()) {
1300         obj->oop_iterate(&check_interesting_pointers);
1301       }
1302       // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1303     }
1304     // else, this humongous object is not live so no need to verify its internal pointers
1305 
1306     if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1307       ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1308                                        "object not properly registered", __FILE__, __LINE__);
1309     }
1310   } else if (!r->is_humongous()) {
1311     while (obj_addr < top) {
1312       oop obj = cast_to_oop(obj_addr);
1313       // ctx->is_marked() returns true if the mark bit is set or if obj is above TAMS.
1314       if ((ctx == nullptr) || ctx->is_marked(obj)) {
1315         size_t card_index = scanner->card_index_for_addr(obj_addr);
1316         // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1317         // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1318         if (!scanner->is_write_card_dirty(card_index) || obj->is_objArray()) {
1319           obj->oop_iterate(&check_interesting_pointers);
1320         }
1321         // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1322 
1323         if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1324           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1325                                            "object not properly registered", __FILE__, __LINE__);
1326         }
1327         obj_addr += obj->size();
1328       } else {
1329         // This object is not live so we don't verify dirty cards contained therein
1330         HeapWord* tams = ctx->top_at_mark_start(r);
1331         obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
1332       }
1333     }
1334   }
1335 }
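// Callers below parameterize this helper differently: verify_rem_set_after_full_gc passes a null
// marking context and uses r->top() as the registration watermark, while verify_rem_set_before_update_ref
// passes the complete marking context (when available) and r->get_update_watermark(). Illustrative call shape:
//
//   help_verify_region_rem_set(r, ctx, r->bottom(), r->top(), r->get_update_watermark(),
//                              "Remembered set violation at init-update-references");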
1336 
1337 // Ensure that the remembered set has a dirty card everywhere there is an interesting pointer.
1338 // This examines the read_card_table between bottom() and top() because all PLABs are retired
1339 // before the safepoint for init_mark.  In fact, we retire them before update-references and do not
1340 // restore them until the start of evacuation.
1341 void ShenandoahVerifier::verify_rem_set_before_mark() {
1342   shenandoah_assert_safepoint();
1343   assert(_heap->mode()->is_generational(), "Only verify remembered set for generational operational modes");
1344 
1345   RememberedScanner* scanner = ShenandoahGenerationalHeap::heap()->old_generation()->card_scan();
1346   ShenandoahVerifyRemSetClosure check_interesting_pointers(true);
1347   ShenandoahMarkingContext* ctx;
1348 
1349   ShenandoahOldGeneration* old_generation = _heap->old_generation();
1350   log_debug(gc)("Verifying remembered set at %s mark", old_generation->is_doing_mixed_evacuations() ? "mixed" : "young");
1351 
1352   if (old_generation->is_mark_complete() || _heap->active_generation()->is_global()) {
1353     ctx = _heap->complete_marking_context();
1354   } else {
1355     ctx = nullptr;
1356   }
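  // A null ctx means there is no reliable marking information for old regions, so the region walk
  // below treats every object under top() as live (see the (ctx == nullptr) checks).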
1357 
1358   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1359     ShenandoahHeapRegion* r = _heap->get_region(i);
1360     HeapWord* tams = (ctx != nullptr) ? ctx->top_at_mark_start(r) : nullptr;
1361 
1362     // TODO: Is this replaceable with a call to help_verify_region_rem_set?
1363 
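    // Note: unlike help_verify_region_rem_set, which consults the write card table via
    // is_write_card_dirty(), the walk below consults the read card table via is_card_dirty(),
    // which is one reason the helper is not simply reused here.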
1364     if (r->is_old() && r->is_active()) {
1365       HeapWord* obj_addr = r->bottom();
1366       if (r->is_humongous_start()) {
1367         oop obj = cast_to_oop(obj_addr);
1368         if ((ctx == nullptr) || ctx->is_marked(obj)) {
1369           // For humongous objects, the typical object is an array, so the following checks may be overkill
1370           // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1371           // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1372           if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1373             obj->oop_iterate(&check_interesting_pointers);
1374           }
1375           // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1376         }
1377         // else, this humongous object is not marked so no need to verify its internal pointers
1378         if (!scanner->verify_registration(obj_addr, ctx)) {
1379           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, nullptr, nullptr,
1380                                            "Verify init-mark remembered set violation", "object not properly registered", __FILE__, __LINE__);
1381         }
1382       } else if (!r->is_humongous()) {
1383         HeapWord* top = r->top();
1384         while (obj_addr < top) {
1385           oop obj = cast_to_oop(obj_addr);
1386           // ctx->is_marked() returns true if mark bit set (TAMS not relevant during init mark)
1387           if ((ctx == nullptr) || ctx->is_marked(obj)) {
1388             // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1389             // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1390             if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1391               obj->oop_iterate(&check_interesting_pointers);
1392             }
1393             // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1394             if (!scanner->verify_registration(obj_addr, ctx)) {
1395               ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, nullptr, nullptr,
1396                                                "Verify init-mark remembered set violation", "object not properly registered", __FILE__, __LINE__);
1397             }
1398             obj_addr += obj->size();
1399           } else {
1400             // This object is not live so we don't verify dirty cards contained therein
1401             assert(tams != nullptr, "If object is not live, ctx and tams should be non-null");
1402             obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
1403           }
1404         }
1405       } // else, we ignore humongous continuation region
1406     } // else, this is not an OLD region so we ignore it
1407   } // all regions have been processed
1408 }
1409 
1410 void ShenandoahVerifier::verify_rem_set_after_full_gc() {
1411   shenandoah_assert_safepoint();
1412   assert(_heap->mode()->is_generational(), "Only verify remembered set for generational operational modes");
1413 
1414   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1415     ShenandoahHeapRegion* r = _heap->get_region(i);
1416     if (r->is_old() && !r->is_cset()) {
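      // Passing a null marking context makes the helper treat every object below top() as live, and
      // using top() as the registration watermark verifies registration for all of them; both reflect
      // the fully-compacted state expected in old regions at the end of a Full GC.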
1417       help_verify_region_rem_set(r, nullptr, r->bottom(), r->top(), r->top(), "Remembered set violation at end of Full GC");
1418     }
1419   }
1420 }
1421 
1422 // Ensure that the remembered set has a dirty card everywhere there is an interesting pointer.  Even though
1423 // the update-references scan of the remembered set only examines cards up to update_watermark, the remembered
1424 // set should be valid through top.  This examines the write_card_table between bottom() and top() because
1425 // all PLABs are retired immediately before the start of update refs.
1426 void ShenandoahVerifier::verify_rem_set_before_update_ref() {
1427   shenandoah_assert_safepoint();
1428   assert(_heap->mode()->is_generational(), "Only verify remembered set for generational operational modes");
1429 
1430   ShenandoahMarkingContext* ctx;
1431 
1432   if (_heap->old_generation()->is_mark_complete() || _heap->active_generation()->is_global()) {
1433     ctx = _heap->complete_marking_context();
1434   } else {
1435     ctx = nullptr;
1436   }
1437 
1438   for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) {
1439     ShenandoahHeapRegion* r = _heap->get_region(i);
1440     if (r->is_old() && !r->is_cset()) {
1441       help_verify_region_rem_set(r, ctx, r->bottom(), r->top(), r->get_update_watermark(),
1442                                  "Remembered set violation at init-update-references");
1443     }
1444   }
1445 }
1446 
1447 void ShenandoahVerifier::verify_before_rebuilding_free_set() {
1448   ShenandoahGenerationStatsClosure cl;
1449   _heap->heap_region_iterate(&cl);
1450 
1451   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->old_generation(), cl.old);
1452   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->young_generation(), cl.young);
1453   ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->global_generation(), cl.global);
1454 }