/*
 * Copyright (c) 2017, 2021, Red Hat, Inc. All rights reserved.
 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "gc/shenandoah/shenandoahAsserts.hpp"
#include "gc/shenandoah/shenandoahForwarding.inline.hpp"
#include "gc/shenandoah/shenandoahGeneration.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
#include "gc/shenandoah/shenandoahOldGeneration.hpp"
#include "gc/shenandoah/shenandoahPhaseTimings.hpp"
#include "gc/shenandoah/shenandoahRootProcessor.hpp"
#include "gc/shenandoah/shenandoahTaskqueue.inline.hpp"
#include "gc/shenandoah/shenandoahUtils.hpp"
#include "gc/shenandoah/shenandoahVerifier.hpp"
#include "gc/shenandoah/shenandoahYoungGeneration.hpp"
#include "memory/allocation.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/compressedOops.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/threads.hpp"
#include "utilities/align.hpp"

// Avoid name collision on verify_oop (defined in macroAssembler_arm.hpp)
#ifdef verify_oop
#undef verify_oop
#endif

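// Identifies java.lang.ref.Reference instances; the verifier treats these
// specially because their referent fields may legitimately be unmarked.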
static bool is_instance_ref_klass(Klass* k) {
  return k->is_instance_klass() && InstanceKlass::cast(k)->reference_type() != REF_NONE;
}

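// "Discovers" every Reference unconditionally, which tells the oop iteration
// machinery to skip referent fields rather than treat them as strong references.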
class ShenandoahIgnoreReferenceDiscoverer : public ReferenceDiscoverer {
public:
  virtual bool discover_reference(oop obj, ReferenceType type) {
    return true;
  }
};

class ShenandoahVerifyOopClosure : public BasicOopIterateClosure {
private:
  const char* _phase;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahVerifierStack* _stack;
  ShenandoahHeap* _heap;
  MarkBitMap* _map;
  ShenandoahLivenessData* _ld;
  void* _interior_loc;
  oop _loc;
  ShenandoahGeneration* _generation;

public:
  ShenandoahVerifyOopClosure(ShenandoahVerifierStack* stack, MarkBitMap* map, ShenandoahLivenessData* ld,
                             const char* phase, ShenandoahVerifier::VerifyOptions options) :
    _phase(phase),
    _options(options),
    _stack(stack),
    _heap(ShenandoahHeap::heap()),
    _map(map),
    _ld(ld),
    _interior_loc(nullptr),
    _loc(nullptr),
    _generation(nullptr) {
    if (options._verify_marked == ShenandoahVerifier::_verify_marked_complete_except_references ||
        options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty ||
        options._verify_marked == ShenandoahVerifier::_verify_marked_disable) {
      set_ref_discoverer_internal(new ShenandoahIgnoreReferenceDiscoverer());
    }

    if (_heap->mode()->is_generational()) {
      _generation = _heap->active_generation();
      assert(_generation != nullptr, "Expected active generation in this mode");
    }
  }

private:
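  // Reports a detailed verification failure (and ultimately aborts the VM)
  // when the given condition does not hold for obj.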
  void check(ShenandoahAsserts::SafeLevel level, oop obj, bool test, const char* label) {
    if (!test) {
      ShenandoahAsserts::print_failure(level, obj, _interior_loc, _loc, _phase, label, __FILE__, __LINE__);
    }
  }

  template <class T>
  void do_oop_work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (is_instance_ref_klass(obj->klass())) {
        obj = ShenandoahForwarding::get_forwardee(obj);
      }
      // Single threaded verification can use faster non-atomic stack and bitmap
      // methods.
      //
      // For performance reasons, only fully verify non-marked field values.
      // We are here when the host object for *p is already marked.

      // TODO: We should consider specializing this closure by generation ==/!= null,
      // to avoid in_generation check on fast path here for non-generational mode.
      if (in_generation(obj) && _map->par_mark(obj)) {
        verify_oop_at(p, obj);
        _stack->push(ShenandoahVerifierTask(obj));
      }
    }
  }

  bool in_generation(oop obj) {
    if (_generation == nullptr) {
      return true;
    }

    ShenandoahHeapRegion* region = _heap->heap_region_containing(obj);
    return _generation->contains(region);
  }

  void verify_oop(oop obj) {
    // Perform consistency checks with gradually decreasing safety level. This guarantees
    // that failure report would not try to touch something that was not yet verified to be
    // safe to process.

    check(ShenandoahAsserts::_safe_unknown, obj, _heap->is_in(obj),
              "oop must be in heap");
    check(ShenandoahAsserts::_safe_unknown, obj, is_object_aligned(obj),
              "oop must be aligned");

    ShenandoahHeapRegion *obj_reg = _heap->heap_region_containing(obj);
    Klass* obj_klass = obj->klass_or_null();

    // Verify that obj is not in dead space:
    {
      // Do this before touching obj->size()
      check(ShenandoahAsserts::_safe_unknown, obj, obj_klass != nullptr,
             "Object klass pointer should not be null");
      check(ShenandoahAsserts::_safe_unknown, obj, Metaspace::contains(obj_klass),
             "Object klass pointer must go to metaspace");

      HeapWord *obj_addr = cast_from_oop<HeapWord*>(obj);
      check(ShenandoahAsserts::_safe_unknown, obj, obj_addr < obj_reg->top(),
             "Object start should be within the region");

      if (!obj_reg->is_humongous()) {
        check(ShenandoahAsserts::_safe_unknown, obj, (obj_addr + obj->size()) <= obj_reg->top(),
               "Object end should be within the region");
      } else {
        size_t humongous_start = obj_reg->index();
        size_t humongous_end = humongous_start + (obj->size() >> ShenandoahHeapRegion::region_size_words_shift());
        for (size_t idx = humongous_start + 1; idx < humongous_end; idx++) {
          check(ShenandoahAsserts::_safe_unknown, obj, _heap->get_region(idx)->is_humongous_continuation(),
                 "Humongous object is in continuation that fits it");
        }
      }

      // ------------ obj is safe at this point --------------

      check(ShenandoahAsserts::_safe_oop, obj, obj_reg->is_active(),
            "Object should be in active region");

      switch (_options._verify_liveness) {
        case ShenandoahVerifier::_verify_liveness_disable:
          // skip
          break;
        case ShenandoahVerifier::_verify_liveness_complete:
          Atomic::add(&_ld[obj_reg->index()], (uint) obj->size(), memory_order_relaxed);
          // fallthrough for fast failure for un-live regions:
        case ShenandoahVerifier::_verify_liveness_conservative:
          check(ShenandoahAsserts::_safe_oop, obj, obj_reg->has_live() ||
                (obj_reg->is_old() && ShenandoahHeap::heap()->is_gc_generation_young()),
                   "Object must belong to region with live data");
          break;
        default:
          assert(false, "Unhandled liveness verification");
      }
    }

    oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);

    ShenandoahHeapRegion* fwd_reg = nullptr;

    if (obj != fwd) {
      check(ShenandoahAsserts::_safe_oop, obj, _heap->is_in(fwd),
             "Forwardee must be in heap");
      check(ShenandoahAsserts::_safe_oop, obj, !CompressedOops::is_null(fwd),
             "Forwardee is set");
      check(ShenandoahAsserts::_safe_oop, obj, is_object_aligned(fwd),
             "Forwardee must be aligned");

      // Do this before touching fwd->size()
      Klass* fwd_klass = fwd->klass_or_null();
      check(ShenandoahAsserts::_safe_oop, obj, fwd_klass != nullptr,
             "Forwardee klass pointer should not be null");
      check(ShenandoahAsserts::_safe_oop, obj, Metaspace::contains(fwd_klass),
             "Forwardee klass pointer must go to metaspace");
      check(ShenandoahAsserts::_safe_oop, obj, obj_klass == fwd_klass,
             "Forwardee klass pointer must match object klass pointer");

      fwd_reg = _heap->heap_region_containing(fwd);

      // Verify that forwardee is not in the dead space:
      check(ShenandoahAsserts::_safe_oop, obj, !fwd_reg->is_humongous(),
             "Should have no humongous forwardees");

      HeapWord *fwd_addr = cast_from_oop<HeapWord *>(fwd);
      check(ShenandoahAsserts::_safe_oop, obj, fwd_addr < fwd_reg->top(),
             "Forwardee start should be within the region");
      check(ShenandoahAsserts::_safe_oop, obj, (fwd_addr + fwd->size()) <= fwd_reg->top(),
             "Forwardee end should be within the region");

      oop fwd2 = ShenandoahForwarding::get_forwardee_raw_unchecked(fwd);
      check(ShenandoahAsserts::_safe_oop, obj, (fwd == fwd2),
             "Double forwarding");
    } else {
      fwd_reg = obj_reg;
    }

    // ------------ obj and fwd are safe at this point --------------
    // We allow for marked or old here for two reasons:
    //  1. If this is a young collect, old objects wouldn't be marked. We've
    //     recently changed the verifier traversal to only follow young objects
    //     during a young collect so this _shouldn't_ be necessary.
    //  2. At present, we do not clear dead objects from the remembered set.
    //     Everything in the remembered set is old (ipso facto), so allowing for
    //     'marked_or_old' covers the case of stale objects in rset.
    // TODO: Just use 'is_marked' here.
    switch (_options._verify_marked) {
      case ShenandoahVerifier::_verify_marked_disable:
        // skip
        break;
      case ShenandoahVerifier::_verify_marked_incomplete:
        check(ShenandoahAsserts::_safe_all, obj, _heap->marking_context()->is_marked_or_old(obj),
               "Must be marked in incomplete bitmap");
        break;
      case ShenandoahVerifier::_verify_marked_complete:
        check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked_or_old(obj),
               "Must be marked in complete bitmap");
        break;
      case ShenandoahVerifier::_verify_marked_complete_except_references:
      case ShenandoahVerifier::_verify_marked_complete_satb_empty:
        check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked_or_old(obj),
              "Must be marked in complete bitmap, except j.l.r.Reference referents");
        break;
      default:
        assert(false, "Unhandled mark verification");
    }

    switch (_options._verify_forwarded) {
      case ShenandoahVerifier::_verify_forwarded_disable:
        // skip
        break;
      case ShenandoahVerifier::_verify_forwarded_none: {
        check(ShenandoahAsserts::_safe_all, obj, (obj == fwd),
               "Should not be forwarded");
        break;
      }
      case ShenandoahVerifier::_verify_forwarded_allow: {
        if (obj != fwd) {
          check(ShenandoahAsserts::_safe_all, obj, obj_reg != fwd_reg,
                 "Forwardee should be in another region");
        }
        break;
      }
      default:
        assert(false, "Unhandled forwarding verification");
    }

    switch (_options._verify_cset) {
      case ShenandoahVerifier::_verify_cset_disable:
        // skip
        break;
      case ShenandoahVerifier::_verify_cset_none:
        check(ShenandoahAsserts::_safe_all, obj, !_heap->in_collection_set(obj),
               "Should not have references to collection set");
        break;
      case ShenandoahVerifier::_verify_cset_forwarded:
        if (_heap->in_collection_set(obj)) {
          check(ShenandoahAsserts::_safe_all, obj, (obj != fwd),
                 "Object in collection set, should have forwardee");
        }
        break;
      default:
        assert(false, "Unhandled cset verification");
    }

  }

public:
  /**
   * Verify object with known interior reference.
   * @param p interior reference where the object is referenced from; can be off-heap
   * @param obj verified object
   */
  template <class T>
  void verify_oop_at(T* p, oop obj) {
    _interior_loc = p;
    verify_oop(obj);
    _interior_loc = nullptr;
  }

  /**
   * Verify object without known interior reference.
   * Useful when picking up the object at known offset in heap,
   * but without knowing what objects reference it.
   * @param obj verified object
   */
  void verify_oop_standalone(oop obj) {
    _interior_loc = nullptr;
    verify_oop(obj);
    _interior_loc = nullptr;
  }

  /**
   * Verify oop fields from this object.
   * @param obj host object for verified fields
   */
  void verify_oops_from(oop obj) {
    _loc = obj;
    obj->oop_iterate(this);
    _loc = nullptr;
  }

  virtual void do_oop(oop* p) { do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { do_oop_work(p); }
};

// This closure computes the amounts of used, committed, and garbage memory and the number of regions contained within
// a subset (e.g. the young generation or old generation) of the total heap.
class ShenandoahCalculateRegionStatsClosure : public ShenandoahHeapRegionClosure {
private:
  size_t _used, _committed, _garbage, _regions, _humongous_waste;
public:
  ShenandoahCalculateRegionStatsClosure() : _used(0), _committed(0), _garbage(0), _regions(0), _humongous_waste(0) {};

  void heap_region_do(ShenandoahHeapRegion* r) {
    _used += r->used();
    _garbage += r->garbage();
    _committed += r->is_committed() ? ShenandoahHeapRegion::region_size_bytes() : 0;
    if (r->is_humongous()) {
      _humongous_waste += r->free();
    }
    _regions++;
    log_debug(gc)("ShenandoahCalculateRegionStatsClosure: adding " SIZE_FORMAT " for %s Region " SIZE_FORMAT ", yielding: " SIZE_FORMAT,
            r->used(), (r->is_humongous() ? "humongous" : "regular"), r->index(), _used);
  }

  size_t used() { return _used; }
  size_t committed() { return _committed; }
  size_t garbage() { return _garbage; }
  size_t regions() { return _regions; }
  size_t waste() { return _humongous_waste; }

  // span is the total memory affiliated with these stats (some of which is in use and the rest is available)
  size_t span() { return _regions * ShenandoahHeapRegion::region_size_bytes(); }
};
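
// Typical usage of the stats closure (mirrors the heap size checks in
// verify_at_safepoint() below):
//
//   ShenandoahCalculateRegionStatsClosure cl;
//   heap->heap_region_iterate(&cl);
//   // ... then compare cl.used()/cl.committed() against the heap-wide counters.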

class ShenandoahGenerationStatsClosure : public ShenandoahHeapRegionClosure {
 public:
  ShenandoahCalculateRegionStatsClosure old;
  ShenandoahCalculateRegionStatsClosure young;
  ShenandoahCalculateRegionStatsClosure global;

  void heap_region_do(ShenandoahHeapRegion* r) override {
    switch (r->affiliation()) {
      case FREE:
        return;
      case YOUNG_GENERATION:
        young.heap_region_do(r);
        global.heap_region_do(r);
        break;
      case OLD_GENERATION:
        old.heap_region_do(r);
        global.heap_region_do(r);
        break;
      default:
        ShouldNotReachHere();
    }
  }

  static void log_usage(ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
    log_debug(gc)("Safepoint verification: %s verified usage: " SIZE_FORMAT "%s, recorded usage: " SIZE_FORMAT "%s",
                  generation->name(),
                  byte_size_in_proper_unit(generation->used()), proper_unit_for_byte_size(generation->used()),
                  byte_size_in_proper_unit(stats.used()),       proper_unit_for_byte_size(stats.used()));
  }

  static void validate_usage(const bool adjust_for_padding,
                             const char* label, ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
    ShenandoahHeap* heap = ShenandoahHeap::heap();
    size_t generation_used = generation->used();
    size_t generation_used_regions = generation->used_regions();
    if (adjust_for_padding && (generation->is_young() || generation->is_global())) {
      size_t pad = ShenandoahHeap::heap()->get_pad_for_promote_in_place();
      generation_used += pad;
    }

    guarantee(stats.used() == generation_used,
              "%s: generation (%s) used size must be consistent: generation-used: " SIZE_FORMAT "%s, regions-used: " SIZE_FORMAT "%s",
              label, generation->name(),
              byte_size_in_proper_unit(generation_used), proper_unit_for_byte_size(generation_used),
              byte_size_in_proper_unit(stats.used()),    proper_unit_for_byte_size(stats.used()));

    guarantee(stats.regions() == generation_used_regions,
              "%s: generation (%s) used regions (" SIZE_FORMAT ") must equal regions that are in use (" SIZE_FORMAT ")",
              label, generation->name(), generation->used_regions(), stats.regions());

    size_t generation_capacity = generation->max_capacity();
    size_t humongous_regions_promoted = 0;
    guarantee(stats.span() <= generation_capacity,
              "%s: generation (%s) size spanned by " SIZE_FORMAT " regions must not exceed current capacity (" SIZE_FORMAT "%s)",
              label, generation->name(), stats.regions(),
              byte_size_in_proper_unit(generation_capacity), proper_unit_for_byte_size(generation_capacity));

    size_t humongous_waste = generation->get_humongous_waste();
    guarantee(stats.waste() == humongous_waste,
              "%s: generation (%s) humongous waste must be consistent: generation: " SIZE_FORMAT "%s, regions: " SIZE_FORMAT "%s",
              label, generation->name(),
              byte_size_in_proper_unit(humongous_waste), proper_unit_for_byte_size(humongous_waste),
              byte_size_in_proper_unit(stats.waste()),   proper_unit_for_byte_size(stats.waste()));
  }
};

class ShenandoahVerifyHeapRegionClosure : public ShenandoahHeapRegionClosure {
private:
  ShenandoahHeap* _heap;
  const char* _phase;
  ShenandoahVerifier::VerifyRegions _regions;
public:
  ShenandoahVerifyHeapRegionClosure(const char* phase, ShenandoahVerifier::VerifyRegions regions) :
    _heap(ShenandoahHeap::heap()),
    _phase(phase),
    _regions(regions) {};

  void print_failure(ShenandoahHeapRegion* r, const char* label) {
    ResourceMark rm;

    ShenandoahMessageBuffer msg("Shenandoah verification failed; %s: %s\n\n", _phase, label);

    stringStream ss;
    r->print_on(&ss);
    msg.append("%s", ss.as_string());

    report_vm_error(__FILE__, __LINE__, msg.buffer());
  }

  void verify(ShenandoahHeapRegion* r, bool test, const char* msg) {
    if (!test) {
      print_failure(r, msg);
    }
  }

  void heap_region_do(ShenandoahHeapRegion* r) {
    switch (_regions) {
      case ShenandoahVerifier::_verify_regions_disable:
        break;
      case ShenandoahVerifier::_verify_regions_notrash:
        verify(r, !r->is_trash(),
               "Should not have trash regions");
        break;
      case ShenandoahVerifier::_verify_regions_nocset:
        verify(r, !r->is_cset(),
               "Should not have cset regions");
        break;
      case ShenandoahVerifier::_verify_regions_notrash_nocset:
        verify(r, !r->is_trash(),
               "Should not have trash regions");
        verify(r, !r->is_cset(),
               "Should not have cset regions");
        break;
      default:
        ShouldNotReachHere();
    }

    verify(r, r->capacity() == ShenandoahHeapRegion::region_size_bytes(),
           "Capacity should match region size");

    verify(r, r->bottom() <= r->top(),
           "Region top should not be less than bottom");

    verify(r, r->bottom() <= _heap->marking_context()->top_at_mark_start(r),
           "Region TAMS should not be less than bottom");

    verify(r, _heap->marking_context()->top_at_mark_start(r) <= r->top(),
           "Complete TAMS should not be larger than top");

    verify(r, r->get_live_data_bytes() <= r->capacity(),
           "Live data cannot be larger than capacity");

    verify(r, r->garbage() <= r->capacity(),
           "Garbage cannot be larger than capacity");

    verify(r, r->used() <= r->capacity(),
           "Used cannot be larger than capacity");

    verify(r, r->get_shared_allocs() <= r->capacity(),
           "Shared alloc count should not be larger than capacity");

    verify(r, r->get_tlab_allocs() <= r->capacity(),
           "TLAB alloc count should not be larger than capacity");

    verify(r, r->get_gclab_allocs() <= r->capacity(),
           "GCLAB alloc count should not be larger than capacity");

    verify(r, r->get_plab_allocs() <= r->capacity(),
           "PLAB alloc count should not be larger than capacity");

    verify(r, r->get_shared_allocs() + r->get_tlab_allocs() + r->get_gclab_allocs() + r->get_plab_allocs() == r->used(),
           "Accurate accounting: shared + TLAB + GCLAB + PLAB = used");

    verify(r, !r->is_empty() || !r->has_live(),
           "Empty regions should not have live data");

    verify(r, r->is_cset() == _heap->collection_set()->is_in(r),
           "Transitional: region flags and collection set agree");
  }
};

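// Parallel worker task that verifies everything reachable from the GC roots
// (used for verification levels 2 and up; see verify_at_safepoint() below).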
class ShenandoahVerifierReachableTask : public WorkerTask {
private:
  const char* _label;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahHeap* _heap;
  ShenandoahLivenessData* _ld;
  MarkBitMap* _bitmap;
  volatile size_t _processed;

public:
  ShenandoahVerifierReachableTask(MarkBitMap* bitmap,
                                  ShenandoahLivenessData* ld,
                                  const char* label,
                                  ShenandoahVerifier::VerifyOptions options) :
    WorkerTask("Shenandoah Verifier Reachable Objects"),
    _label(label),
    _options(options),
    _heap(ShenandoahHeap::heap()),
    _ld(ld),
    _bitmap(bitmap),
    _processed(0) {};

  size_t processed() {
    return _processed;
  }

  virtual void work(uint worker_id) {
    ResourceMark rm;
    ShenandoahVerifierStack stack;

    // On level 2, we need to only check the roots once.
    // On level 3, we want to check the roots, and seed the local stack.
    // It is a lesser evil to accept multiple root scans at level 3, because
    // extended parallelism would buy us out.
    if (((ShenandoahVerifyLevel == 2) && (worker_id == 0))
        || (ShenandoahVerifyLevel >= 3)) {
        ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
                                      ShenandoahMessageBuffer("%s, Roots", _label),
                                      _options);
        if (_heap->unload_classes()) {
          ShenandoahRootVerifier::strong_roots_do(&cl);
        } else {
          ShenandoahRootVerifier::roots_do(&cl);
        }
    }

    size_t processed = 0;

    if (ShenandoahVerifyLevel >= 3) {
      ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
                                    ShenandoahMessageBuffer("%s, Reachable", _label),
                                    _options);
      while (!stack.is_empty()) {
        processed++;
        ShenandoahVerifierTask task = stack.pop();
        cl.verify_oops_from(task.obj());
      }
    }

    Atomic::add(&_processed, processed, memory_order_relaxed);
  }
};

class ShenandoahVerifyNoIncompleteSatbBuffers : public ThreadClosure {
public:
  virtual void do_thread(Thread* thread) {
    SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
    if (!queue.is_empty()) {
      fatal("All SATB buffers should have been flushed during mark");
    }
  }
};

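// Parallel worker task that walks all objects marked in the complete marking
// context, region by region (used at verification level 4 when a complete
// mark bitmap is expected).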
class ShenandoahVerifierMarkedRegionTask : public WorkerTask {
private:
  const char* _label;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahHeap *_heap;
  MarkBitMap* _bitmap;
  ShenandoahLivenessData* _ld;
  volatile size_t _claimed;
  volatile size_t _processed;
  ShenandoahGeneration* _generation;

public:
  ShenandoahVerifierMarkedRegionTask(MarkBitMap* bitmap,
                                     ShenandoahLivenessData* ld,
                                     const char* label,
                                     ShenandoahVerifier::VerifyOptions options) :
          WorkerTask("Shenandoah Verifier Marked Objects"),
          _label(label),
          _options(options),
          _heap(ShenandoahHeap::heap()),
          _bitmap(bitmap),
          _ld(ld),
          _claimed(0),
          _processed(0),
          _generation(nullptr) {
    if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
      Threads::change_thread_claim_token();
    }

    if (_heap->mode()->is_generational()) {
      _generation = _heap->active_generation();
      assert(_generation != nullptr, "Expected active generation in this mode.");
    }
  };

  size_t processed() {
    return Atomic::load(&_processed);
  }

  virtual void work(uint worker_id) {
    if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
      ShenandoahVerifyNoIncompleteSatbBuffers verify_satb;
      Threads::possibly_parallel_threads_do(true, &verify_satb);
    }

    ShenandoahVerifierStack stack;
    ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
                                  ShenandoahMessageBuffer("%s, Marked", _label),
                                  _options);

    while (true) {
      size_t v = Atomic::fetch_then_add(&_claimed, 1u, memory_order_relaxed);
      if (v < _heap->num_regions()) {
        ShenandoahHeapRegion* r = _heap->get_region(v);
        if (!in_generation(r)) {
          continue;
        }

        if (!r->is_humongous() && !r->is_trash()) {
          work_regular(r, stack, cl);
        } else if (r->is_humongous_start()) {
          work_humongous(r, stack, cl);
        }
      } else {
        break;
      }
    }
  }

  bool in_generation(ShenandoahHeapRegion* r) {
    return _generation == nullptr || _generation->contains(r);
  }

  virtual void work_humongous(ShenandoahHeapRegion *r, ShenandoahVerifierStack& stack, ShenandoahVerifyOopClosure& cl) {
    size_t processed = 0;
    HeapWord* obj = r->bottom();
    if (_heap->complete_marking_context()->is_marked(cast_to_oop(obj))) {
      verify_and_follow(obj, stack, cl, &processed);
    }
    Atomic::add(&_processed, processed, memory_order_relaxed);
  }

  virtual void work_regular(ShenandoahHeapRegion *r, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl) {
    size_t processed = 0;
    ShenandoahMarkingContext* ctx = _heap->complete_marking_context();
    HeapWord* tams = ctx->top_at_mark_start(r);

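    // Below TAMS only the mark bitmap tells us where live objects start; above
    // TAMS every object was allocated after mark start and is implicitly live,
    // so the heap can be walked linearly by object size.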
    // Bitmaps, before TAMS
    if (tams > r->bottom()) {
      HeapWord* start = r->bottom();
      HeapWord* addr = ctx->get_next_marked_addr(start, tams);

      while (addr < tams) {
        verify_and_follow(addr, stack, cl, &processed);
        addr += 1;
        if (addr < tams) {
          addr = ctx->get_next_marked_addr(addr, tams);
        }
      }
    }

    // Size-based, after TAMS
    {
      HeapWord* limit = r->top();
      HeapWord* addr = tams;

      while (addr < limit) {
        verify_and_follow(addr, stack, cl, &processed);
        addr += cast_to_oop(addr)->size();
      }
    }

    Atomic::add(&_processed, processed, memory_order_relaxed);
  }

  void verify_and_follow(HeapWord *addr, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl, size_t *processed) {
    if (!_bitmap->par_mark(addr)) return;

    // Verify the object itself:
    oop obj = cast_to_oop(addr);
    cl.verify_oop_standalone(obj);

    // Verify everything reachable from that object too, hopefully realizing
    // everything was already marked, and never touching further:
    if (!is_instance_ref_klass(obj->klass())) {
      cl.verify_oops_from(obj);
      (*processed)++;
    }
    while (!stack.is_empty()) {
      ShenandoahVerifierTask task = stack.pop();
      cl.verify_oops_from(task.obj());
      (*processed)++;
    }
  }
};

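// Checks that a Java thread's cached gc-state byte agrees with the state the
// verifier expects at this safepoint.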
class VerifyThreadGCState : public ThreadClosure {
private:
  const char* const _label;
         char const _expected;

public:
  VerifyThreadGCState(const char* label, char expected) : _label(label), _expected(expected) {}
  void do_thread(Thread* t) {
    char actual = ShenandoahThreadLocalData::gc_state(t);
    if (!verify_gc_state(actual, _expected)) {
      fatal("%s: Thread %s: expected gc-state %d, actual %d", _label, t->name(), _expected, actual);
    }
  }

  static bool verify_gc_state(char actual, char expected) {
    // Old generation marking is allowed in all states.
    if (ShenandoahHeap::heap()->mode()->is_generational()) {
      return ((actual & ~(ShenandoahHeap::OLD_MARKING | ShenandoahHeap::MARKING)) == expected);
    } else {
      assert((actual & ShenandoahHeap::OLD_MARKING) == 0, "Should not mark old in non-generational mode");
      return (actual == expected);
    }
  }
};

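// As used throughout this file, ShenandoahVerifyLevel selects how much work the
// verifier does: level >= 1 verifies heap regions, >= 2 also verifies GC roots,
// >= 3 walks all objects reachable from roots, and >= 4 additionally walks all
// marked (possibly unreachable) objects and checks liveness accounting.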
void ShenandoahVerifier::verify_at_safepoint(const char* label,
                                             VerifyRememberedSet remembered,
                                             VerifyForwarded forwarded, VerifyMarked marked,
                                             VerifyCollectionSet cset,
                                             VerifyLiveness liveness, VerifyRegions regions,
                                             VerifySize sizeness,
                                             VerifyGCState gcstate) {
  guarantee(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "only when nothing else happens");
  guarantee(ShenandoahVerify, "only when enabled, and bitmap is initialized in ShenandoahHeap::initialize");

  // Avoid side-effect of changing workers' active thread count, but bypass concurrent/parallel protocol check
  ShenandoahPushWorkerScope verify_worker_scope(_heap->workers(), _heap->max_workers(), false /*bypass check*/);

  log_info(gc,start)("Verify %s, Level " INTX_FORMAT, label, ShenandoahVerifyLevel);

  // GC state checks
  {
    char expected = -1;
    bool enabled;
    switch (gcstate) {
      case _verify_gcstate_disable:
        enabled = false;
        break;
      case _verify_gcstate_forwarded:
        enabled = true;
        expected = ShenandoahHeap::HAS_FORWARDED;
        break;
      case _verify_gcstate_evacuation:
        enabled = true;
        expected = ShenandoahHeap::EVACUATION;
        if (!_heap->is_stw_gc_in_progress()) {
          // Only concurrent GC sets this.
          expected |= ShenandoahHeap::WEAK_ROOTS;
        }
        break;
      case _verify_gcstate_updating:
        enabled = true;
        expected = ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::UPDATEREFS;
        break;
      case _verify_gcstate_stable:
        enabled = true;
        expected = ShenandoahHeap::STABLE;
        break;
      case _verify_gcstate_stable_weakroots:
        enabled = true;
        expected = ShenandoahHeap::STABLE;
        if (!_heap->is_stw_gc_in_progress()) {
          // Only concurrent GC sets this.
          expected |= ShenandoahHeap::WEAK_ROOTS;
        }
        break;
      default:
        enabled = false;
        assert(false, "Unhandled gc-state verification");
    }

    if (enabled) {
      char actual = _heap->gc_state();

      bool is_marking = (actual & ShenandoahHeap::MARKING) != 0;
      bool is_marking_young_or_old = (actual & (ShenandoahHeap::YOUNG_MARKING | ShenandoahHeap::OLD_MARKING)) != 0;
      assert(is_marking == is_marking_young_or_old, "MARKING iff (YOUNG_MARKING or OLD_MARKING), gc_state is: %x", actual);

      // Old generation marking is allowed in all states.
      if (!VerifyThreadGCState::verify_gc_state(actual, expected)) {
        fatal("%s: Global gc-state: expected %d, actual %d", label, expected, actual);
      }

      VerifyThreadGCState vtgcs(label, expected);
      Threads::java_threads_do(&vtgcs);
    }
  }

  // Deactivate barriers temporarily: Verifier wants plain heap accesses
  ShenandoahGCStateResetter resetter;

  // Heap size checks
  {
    ShenandoahHeapLocker lock(_heap->lock());

    ShenandoahCalculateRegionStatsClosure cl;
    _heap->heap_region_iterate(&cl);
    size_t heap_used;
    if (_heap->mode()->is_generational() && (sizeness == _verify_size_adjusted_for_padding)) {
      // Prior to evacuation, regular regions that are to be evacuated in place are padded to prevent further allocations
      heap_used = _heap->used() + _heap->get_pad_for_promote_in_place();
    } else if (sizeness != _verify_size_disable) {
      heap_used = _heap->used();
    }
    if (sizeness != _verify_size_disable) {
      guarantee(cl.used() == heap_used,
                "%s: heap used size must be consistent: heap-used = " SIZE_FORMAT "%s, regions-used = " SIZE_FORMAT "%s",
                label,
                byte_size_in_proper_unit(heap_used), proper_unit_for_byte_size(heap_used),
                byte_size_in_proper_unit(cl.used()), proper_unit_for_byte_size(cl.used()));
    }
    size_t heap_committed = _heap->committed();
    guarantee(cl.committed() == heap_committed,
              "%s: heap committed size must be consistent: heap-committed = " SIZE_FORMAT "%s, regions-committed = " SIZE_FORMAT "%s",
              label,
              byte_size_in_proper_unit(heap_committed), proper_unit_for_byte_size(heap_committed),
              byte_size_in_proper_unit(cl.committed()), proper_unit_for_byte_size(cl.committed()));
  }

  log_debug(gc)("Safepoint verification finished heap usage verification");

  ShenandoahGeneration* generation;
  if (_heap->mode()->is_generational()) {
    generation = _heap->active_generation();
    guarantee(generation != nullptr, "Need to know which generation to verify.");
  } else {
    generation = nullptr;
  }

  if (generation != nullptr) {
    ShenandoahHeapLocker lock(_heap->lock());

    switch (remembered) {
      case _verify_remembered_disable:
        break;
      case _verify_remembered_before_marking:
        log_debug(gc)("Safepoint verification of remembered set at mark");
        verify_rem_set_before_mark();
        break;
      case _verify_remembered_before_updating_references:
        log_debug(gc)("Safepoint verification of remembered set at update ref");
        verify_rem_set_before_update_ref();
        break;
      case _verify_remembered_after_full_gc:
        log_debug(gc)("Safepoint verification of remembered set after full gc");
        verify_rem_set_after_full_gc();
        break;
      default:
        fatal("Unhandled remembered set verification mode");
    }

    ShenandoahGenerationStatsClosure cl;
    _heap->heap_region_iterate(&cl);

    if (LogTarget(Debug, gc)::is_enabled()) {
      ShenandoahGenerationStatsClosure::log_usage(_heap->old_generation(),    cl.old);
      ShenandoahGenerationStatsClosure::log_usage(_heap->young_generation(),  cl.young);
      ShenandoahGenerationStatsClosure::log_usage(_heap->global_generation(), cl.global);
    }
    if (sizeness == _verify_size_adjusted_for_padding) {
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
      ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->young_generation(), cl.young);
      ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->global_generation(), cl.global);
    } else if (sizeness == _verify_size_exact) {
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->young_generation(), cl.young);
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->global_generation(), cl.global);
    }
    // else: sizeness must equal _verify_size_disable
  }

  log_debug(gc)("Safepoint verification finished remembered set verification");

  // Internal heap region checks
  if (ShenandoahVerifyLevel >= 1) {
    ShenandoahVerifyHeapRegionClosure cl(label, regions);
    if (generation != nullptr) {
      generation->heap_region_iterate(&cl);
    } else {
      _heap->heap_region_iterate(&cl);
    }
  }

  log_debug(gc)("Safepoint verification finished heap region closure verification");

  OrderAccess::fence();

  if (UseTLAB) {
    _heap->labs_make_parsable();
  }

  // Allocate temporary bitmap for storing marking wavefront:
  _verification_bit_map->clear();

  // Allocate temporary array for storing liveness data
  ShenandoahLivenessData* ld = NEW_C_HEAP_ARRAY(ShenandoahLivenessData, _heap->num_regions(), mtGC);
  Copy::fill_to_bytes((void*)ld, _heap->num_regions()*sizeof(ShenandoahLivenessData), 0);

  const VerifyOptions& options = ShenandoahVerifier::VerifyOptions(forwarded, marked, cset, liveness, regions, gcstate);

  // Steps 1-2. Scan root set to get initial reachable set. Finish walking the reachable heap.
  // This verifies what the application can see, since it only cares about reachable objects.
  size_t count_reachable = 0;
  if (ShenandoahVerifyLevel >= 2) {
    ShenandoahVerifierReachableTask task(_verification_bit_map, ld, label, options);
    _heap->workers()->run_task(&task);
    count_reachable = task.processed();
  }

  log_debug(gc)("Safepoint verification finished getting initial reachable set");

  // Step 3. Walk marked objects. Marked objects might be unreachable. This verifies what the collector,
  // not the application, can see during the region scans. There is no reason to process the objects
  // that were already verified, e.g. those marked in verification bitmap. There is interaction with TAMS:
  // before TAMS, we verify the bitmaps, if available; after TAMS, we walk until the top(). It mimics
  // what marked_object_iterate is doing, without calling into that optimized (and possibly incorrect)
  // version.

  size_t count_marked = 0;
  if (ShenandoahVerifyLevel >= 4 &&
        (marked == _verify_marked_complete ||
         marked == _verify_marked_complete_except_references ||
         marked == _verify_marked_complete_satb_empty)) {
    guarantee(_heap->marking_context()->is_complete(), "Marking context should be complete");
    ShenandoahVerifierMarkedRegionTask task(_verification_bit_map, ld, label, options);
    _heap->workers()->run_task(&task);
    count_marked = task.processed();
  } else {
    guarantee(ShenandoahVerifyLevel < 4 || marked == _verify_marked_incomplete || marked == _verify_marked_disable, "Should be");
  }

  log_debug(gc)("Safepoint verification finished walking marked objects");

  // Step 4. Verify accumulated liveness data, if needed. Only reliable if verification level includes
  // marked objects.

  if (ShenandoahVerifyLevel >= 4 && marked == _verify_marked_complete && liveness == _verify_liveness_complete) {
    for (size_t i = 0; i < _heap->num_regions(); i++) {
      ShenandoahHeapRegion* r = _heap->get_region(i);
      if (generation != nullptr && !generation->contains(r)) {
        continue;
      }

      juint verf_live = 0;
      if (r->is_humongous()) {
        // For humongous objects, test if start region is marked live, and if so,
        // all humongous regions in that chain have live data equal to their "used".
        juint start_live = Atomic::load(&ld[r->humongous_start_region()->index()]);
        if (start_live > 0) {
          verf_live = (juint)(r->used() / HeapWordSize);
        }
      } else {
        verf_live = Atomic::load(&ld[r->index()]);
      }

      size_t reg_live = r->get_live_data_words();
      if (reg_live != verf_live) {
        stringStream ss;
        r->print_on(&ss);
        fatal("%s: Live data should match: region-live = " SIZE_FORMAT ", verifier-live = " UINT32_FORMAT "\n%s",
              label, reg_live, verf_live, ss.freeze());
      }
    }
  }

  log_debug(gc)("Safepoint verification finished accumulation of liveness data");

  log_info(gc)("Verify %s, Level " INTX_FORMAT " (" SIZE_FORMAT " reachable, " SIZE_FORMAT " marked)",
               label, ShenandoahVerifyLevel, count_reachable, count_marked);

  FREE_C_HEAP_ARRAY(ShenandoahLivenessData, ld);
}

void ShenandoahVerifier::verify_generic(VerifyOption vo) {
  verify_at_safepoint(
          "Generic Verification",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // conservatively allow forwarded
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_disable,        // cset may be inconsistent
          _verify_liveness_disable,    // no reliable liveness data
          _verify_regions_disable,     // no reliable region data
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_disable      // no data about gcstate
  );
}

void ShenandoahVerifier::verify_before_concmark() {
  verify_at_safepoint(
          "Before Mark",
          _verify_remembered_before_marking,
                                       // verify read-only remembered set from bottom() to top()
          _verify_forwarded_none,      // UR should have fixed up
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_none,           // UR should have fixed this
          _verify_liveness_disable,    // no reliable liveness data
          _verify_regions_notrash,     // no trash regions
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // there are no forwarded objects
  );
}

void ShenandoahVerifier::verify_after_concmark() {
  verify_at_safepoint(
          "After Mark",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_none,      // no forwarded references
          _verify_marked_complete_satb_empty,
                                       // bitmaps as precise as we can get, except dangling j.l.r.Refs
          _verify_cset_none,           // no references to cset anymore
          _verify_liveness_complete,   // liveness data must be complete here
          _verify_regions_disable,     // trash regions not yet recycled
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable_weakroots  // heap is still stable, weakroots are in progress
  );
}

void ShenandoahVerifier::verify_before_evacuation() {
  verify_at_safepoint(
          "Before Evacuation",
          _verify_remembered_disable,                // do not verify remembered set
          _verify_forwarded_none,                    // no forwarded references
          _verify_marked_complete_except_references, // walk over marked objects too
          _verify_cset_disable,                      // non-forwarded references to cset expected
          _verify_liveness_complete,                 // liveness data must be complete here
          _verify_regions_disable,                   // trash regions not yet recycled
          _verify_size_adjusted_for_padding,         // expect generation and heap sizes to match after adjustments
                                                     //  for promote in place padding
          _verify_gcstate_stable_weakroots           // heap is still stable, weakroots are in progress
  );
}

void ShenandoahVerifier::verify_during_evacuation() {
  verify_at_safepoint(
          "During Evacuation",
          _verify_remembered_disable, // do not verify remembered set
          _verify_forwarded_allow,    // some forwarded references are allowed
          _verify_marked_disable,     // walk only roots
          _verify_cset_disable,       // some cset references are not forwarded yet
          _verify_liveness_disable,   // liveness data might be already stale after pre-evacs
          _verify_regions_disable,    // trash regions not yet recycled
          _verify_size_disable,       // we don't know how much of promote-in-place work has been completed
          _verify_gcstate_evacuation  // evacuation is in progress
  );
}

void ShenandoahVerifier::verify_after_evacuation() {
  verify_at_safepoint(
          "After Evacuation",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // objects are still forwarded
          _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
          _verify_cset_forwarded,      // all cset refs are fully forwarded
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash,     // trash regions have been recycled already
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_forwarded    // evacuation produced some forwarded objects
  );
}

void ShenandoahVerifier::verify_before_updaterefs() {
  verify_at_safepoint(
          "Before Updating References",
          _verify_remembered_before_updating_references,  // verify read-write remembered set
          _verify_forwarded_allow,     // forwarded references allowed
          _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
          _verify_cset_forwarded,      // all cset refs are fully forwarded
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash,     // trash regions have been recycled already
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_updating     // evacuation should have produced some forwarded objects
  );
}

// We have not yet cleaned up (reclaimed) the collection set
void ShenandoahVerifier::verify_after_updaterefs() {
  verify_at_safepoint(
          "After Updating References",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_none,      // no forwarded references
          _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
          _verify_cset_none,           // no cset references, all updated
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_nocset,      // no cset regions, trash regions have appeared
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // update refs had cleaned up forwarded objects
  );
}

void ShenandoahVerifier::verify_after_degenerated() {
  verify_at_safepoint(
          "After Degenerated GC",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_none,      // all objects are non-forwarded
          _verify_marked_complete,     // all objects are marked in complete bitmap
          _verify_cset_none,           // no cset references
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash_nocset, // no trash, no cset
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // degenerated refs had cleaned up forwarded objects
  );
}

void ShenandoahVerifier::verify_before_fullgc() {
  verify_at_safepoint(
          "Before Full GC",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // can have forwarded objects
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_disable,        // cset might be foobared
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_disable,     // no reliable region data here
          _verify_size_disable,        // if we degenerate during evacuation, usage not valid: padding and deferred accounting
          _verify_gcstate_disable      // no reliable gcstate data
  );
}

void ShenandoahVerifier::verify_after_fullgc() {
  verify_at_safepoint(
          "After Full GC",
          _verify_remembered_after_full_gc,  // verify read-write remembered set
          _verify_forwarded_none,      // all objects are non-forwarded
          _verify_marked_complete,     // all objects are marked in complete bitmap
          _verify_cset_none,           // no cset references
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash_nocset, // no trash, no cset
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // full gc cleaned up everything
  );
}

// TODO: Why does this closure not visit metadata?
class ShenandoahVerifyNoForwarded : public BasicOopIterateClosure {
private:
  template <class T>
  void do_oop_work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
      if (obj != fwd) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                                         "Verify Roots", "Should not be forwarded", __FILE__, __LINE__);
      }
    }
  }

public:
  void do_oop(narrowOop* p) { do_oop_work(p); }
  void do_oop(oop* p)       { do_oop_work(p); }
};

// TODO: Why does this closure not visit metadata?
1212 class ShenandoahVerifyInToSpaceClosure : public BasicOopIterateClosure {
1213 private:
1214   template <class T>
1215   void do_oop_work(T* p) {
1216     T o = RawAccess<>::oop_load(p);
1217     if (!CompressedOops::is_null(o)) {
1218       oop obj = CompressedOops::decode_not_null(o);
1219       ShenandoahHeap* heap = ShenandoahHeap::heap();
1220 
1221       if (!heap->marking_context()->is_marked_or_old(obj)) {
1222         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1223                 "Verify Roots In To-Space", "Should be marked", __FILE__, __LINE__);
1224       }
1225 
1226       if (heap->in_collection_set(obj)) {
1227         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1228                 "Verify Roots In To-Space", "Should not be in collection set", __FILE__, __LINE__);
1229       }
1230 
1231       oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
1232       if (obj != fwd) {
1233         ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1234                 "Verify Roots In To-Space", "Should not be forwarded", __FILE__, __LINE__);
1235       }
1236     }
1237   }
1238 
1239 public:
1240   void do_oop(narrowOop* p) { do_oop_work(p); }
1241   void do_oop(oop* p)       { do_oop_work(p); }
1242 };
1243 
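     // Verify that every object reachable from the roots is marked (or resides in the old generation),
     // is not in the collection set, and is not forwarded.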
1244 void ShenandoahVerifier::verify_roots_in_to_space() {
1245   ShenandoahVerifyInToSpaceClosure cl;
1246   ShenandoahRootVerifier::roots_do(&cl);
1247 }
1248 
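     // Verify that no root points to a forwarded object.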
1249 void ShenandoahVerifier::verify_roots_no_forwarded() {
1250   ShenandoahVerifyNoForwarded cl;
1251   ShenandoahRootVerifier::roots_do(&cl);
1252 }
1253 
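     // Checks that each pointer into the young generation lies on a dirty card: the read card table
     // is consulted at init-mark, the write card table at init-update-refs.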
1254 class ShenandoahVerifyRemSetClosure : public BasicOopIterateClosure {
1255 protected:
1256   bool               const _init_mark;
1257   ShenandoahHeap*    const _heap;
1258   RememberedScanner* const _scanner;
1259 
1260 public:
1261   // The argument distinguishes between verification at init-mark and at the start of update refs.
1262   ShenandoahVerifyRemSetClosure(bool init_mark) :
1263             _init_mark(init_mark),
1264             _heap(ShenandoahHeap::heap()),
1265             _scanner(_heap->card_scan()) {}
1266 
1267   template<class T>
1268   inline void work(T* p) {
1269     T o = RawAccess<>::oop_load(p);
1270     if (!CompressedOops::is_null(o)) {
1271       oop obj = CompressedOops::decode_not_null(o);
1272       if (_heap->is_in_young(obj)) {
1273         size_t card_index = _scanner->card_index_for_addr((HeapWord*) p);
1274         if (_init_mark && !_scanner->is_card_dirty(card_index)) {
1275           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1276                                            "Verify init-mark remembered set violation", "clean card should be dirty", __FILE__, __LINE__);
1277         } else if (!_init_mark && !_scanner->is_write_card_dirty(card_index)) {
1278           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
1279                                            "Verify init-update-refs remembered set violation", "clean card should be dirty", __FILE__, __LINE__);
1280         }
1281       }
1282     }
1283   }
1284 
1285   virtual void do_oop(narrowOop* p) { work(p); }
1286   virtual void do_oop(oop* p)       { work(p); }
1287 };
1288 
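     // Walk the live objects of a single region and verify that (a) interesting pointers they contain
     // are covered by dirty cards in the write card table, and (b) objects below registration_watermark
     // are properly registered with the remembered set scanner.  Humongous continuation regions are skipped.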
1289 void ShenandoahVerifier::help_verify_region_rem_set(ShenandoahHeapRegion* r, ShenandoahMarkingContext* ctx, HeapWord* from,
1290                                                     HeapWord* top, HeapWord* registration_watermark, const char* message) {
1291   RememberedScanner* scanner = _heap->card_scan();
1292   ShenandoahVerifyRemSetClosure check_interesting_pointers(false);
1293 
1294   HeapWord* obj_addr = from;
1295   if (r->is_humongous_start()) {
1296     oop obj = cast_to_oop(obj_addr);
1297     if ((ctx == nullptr) || ctx->is_marked(obj)) {
1298       size_t card_index = scanner->card_index_for_addr(obj_addr);
1299       // For humongous objects, the typical object is an array, so the following checks may be overkill
1300       // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1301       // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1302       if (!scanner->is_write_card_dirty(card_index) || obj->is_objArray()) {
1303         obj->oop_iterate(&check_interesting_pointers);
1304       }
1305       // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1306     }
1307     // else, this humongous object is not live so no need to verify its internal pointers
1308 
1309     if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1310       ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1311                                        "object not properly registered", __FILE__, __LINE__);
1312     }
1313   } else if (!r->is_humongous()) {
1314     while (obj_addr < top) {
1315       oop obj = cast_to_oop(obj_addr);
1316       // ctx->is_marked() returns true if mark bit set or if obj above TAMS.
1317       if ((ctx == nullptr) || ctx->is_marked(obj)) {
1318         size_t card_index = scanner->card_index_for_addr(obj_addr);
1319         // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1320         // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1321         if (!scanner->is_write_card_dirty(card_index) || obj->is_objArray()) {
1322           obj->oop_iterate(&check_interesting_pointers);
1323         }
1324         // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1325 
1326         if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
1327           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
1328                                            "object not properly registered", __FILE__, __LINE__);
1329         }
1330         obj_addr += obj->size();
1331       } else {
1332         // This object is not live so we don't verify dirty cards contained therein
1333         HeapWord* tams = ctx->top_at_mark_start(r);
1334         obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
1335       }
1336     }
1337   }
1338 }
1339 
1340 // Verify that the remembered set has a dirty card everywhere there is an interesting pointer.
1341 // This examines the read_card_table between bottom() and top() since all PLABs are retired
1342 // before the init-mark safepoint.  More precisely, we retire them before update-references and do not
1343 // restore them until the start of evacuation.
1344 void ShenandoahVerifier::verify_rem_set_before_mark() {
1345   shenandoah_assert_safepoint();
1346   assert(_heap->mode()->is_generational(), "Only verify remembered set for generational operational modes");
1347 
1348   ShenandoahRegionIterator iterator;
1349   RememberedScanner* scanner = _heap->card_scan();
1350   ShenandoahVerifyRemSetClosure check_interesting_pointers(true);
1351   ShenandoahMarkingContext* ctx;
1352 
1353   log_debug(gc)("Verifying remembered set at %s mark", _heap->doing_mixed_evacuations() ? "mixed" : "young");
1354 
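       // Rely on the complete marking context only when the old-generation bitmap is stable (or marking
       // is global); otherwise ctx stays null and every object is treated as live.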
1355   if (_heap->is_old_bitmap_stable() || _heap->active_generation()->is_global()) {
1356     ctx = _heap->complete_marking_context();
1357   } else {
1358     ctx = nullptr;
1359   }
1360 
1361   while (iterator.has_next()) {
1362     ShenandoahHeapRegion* r = iterator.next();
1363     if (r == nullptr) {
1364       // TODO: Can this really happen?
1365       break;
1366     }
1367 
1368     HeapWord* tams = (ctx != nullptr) ? ctx->top_at_mark_start(r) : nullptr;
1369 
1370   // TODO: Is this replaceable with a call to help_verify_region_rem_set?
1371 
1372     if (r->is_old() && r->is_active()) {
1373       HeapWord* obj_addr = r->bottom();
1374       if (r->is_humongous_start()) {
1375         oop obj = cast_to_oop(obj_addr);
1376         if ((ctx == nullptr) || ctx->is_marked(obj)) {
1377           // For humongous objects, the typical object is an array, so the following checks may be overkill
1378           // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1379           // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1380           if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1381             obj->oop_iterate(&check_interesting_pointers);
1382           }
1383           // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1384         }
1385         // else, this humongous object is not marked so no need to verify its internal pointers
1386         if (!scanner->verify_registration(obj_addr, ctx)) {
1387           ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, nullptr, nullptr,
1388                                            "Verify init-mark remembered set violation", "object not properly registered", __FILE__, __LINE__);
1389         }
1390       } else if (!r->is_humongous()) {
1391         HeapWord* top = r->top();
1392         while (obj_addr < top) {
1393           oop obj = cast_to_oop(obj_addr);
1394           // ctx->is_marked() returns true if mark bit set (TAMS not relevant during init mark)
1395           if ((ctx == nullptr) || ctx->is_marked(obj)) {
1396             // For regular objects (not object arrays), if the card holding the start of the object is dirty,
1397             // we do not need to verify that cards spanning interesting pointers within this object are dirty.
1398             if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
1399               obj->oop_iterate(&check_interesting_pointers);
1400             }
1401             // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
1402             if (!scanner->verify_registration(obj_addr, ctx)) {
1403               ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, nullptr, nullptr,
1404                                                "Verify init-mark remembered set violation", "object not properly registered", __FILE__, __LINE__);
1405             }
1406             obj_addr += obj->size();
1407           } else {
1408             // This object is not live so we don't verify dirty cards contained therein
1409             assert(tams != nullptr, "If object is not live, ctx and tams should be non-null");
1410             obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
1411           }
1412         }
1413       } // else, we ignore humongous continuation region
1414     } // else, this is not an OLD region so we ignore it
1415   } // all regions have been processed
1416 }
1417 
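     // After Full GC, verify the remembered set of every old region outside the collection set from
     // bottom() to top(), passing a null marking context so that every object is treated as live.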
1418 void ShenandoahVerifier::verify_rem_set_after_full_gc() {
1419   shenandoah_assert_safepoint();
1420   assert(_heap->mode()->is_generational(), "Only verify remembered set for generational operational modes");
1421 
1422   ShenandoahRegionIterator iterator;
1423 
1424   while (iterator.has_next()) {
1425     ShenandoahHeapRegion* r = iterator.next();
1426     if (r == nullptr) {
1427       // TODO: Can this really happen?
1428       break;
1429     }
1430     if (r->is_old() && !r->is_cset()) {
1431       help_verify_region_rem_set(r, nullptr, r->bottom(), r->top(), r->top(), "Remembered set violation at end of Full GC");
1432     }
1433   }
1434 }
1435 
1436 // Verify that the remembered set has a dirty card everywhere there is an interesting pointer.  Even though
1437 // the update-references scan of the remembered set only examines cards up to update_watermark, the remembered
1438 // set should be valid through top().  This examines the write_card_table between bottom() and top() because
1439 // all PLABs are retired immediately before the start of update refs.
1440 void ShenandoahVerifier::verify_rem_set_before_update_ref() {
1441   shenandoah_assert_safepoint();
1442   assert(_heap->mode()->is_generational(), "Only verify remembered set for generational operational modes");
1443 
1444   ShenandoahRegionIterator iterator;
1445   ShenandoahMarkingContext* ctx;
1446 
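       // As at init-mark: rely on the old marking context only when the old bitmap is stable or
       // marking is global; otherwise treat every object as live.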
1447   if (_heap->is_old_bitmap_stable() || _heap->active_generation()->is_global()) {
1448     ctx = _heap->complete_marking_context();
1449   } else {
1450     ctx = nullptr;
1451   }
1452 
1453   while (iterator.has_next()) {
1454     ShenandoahHeapRegion* r = iterator.next();
1455     if (r == nullptr) {
1456       // TODO: Can this really happen?
1457       break;
1458     }
1459     if (r->is_old() && !r->is_cset()) {
1460       help_verify_region_rem_set(r, ctx, r->bottom(), r->top(), r->get_update_watermark(),
1461                                  "Remembered set violation at init-update-references");
1462     }
1463   }
1464 }