/*
 * Copyright (c) 2017, 2021, Red Hat, Inc. All rights reserved.
 * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "gc/shenandoah/shenandoahAsserts.hpp"
#include "gc/shenandoah/shenandoahForwarding.inline.hpp"
#include "gc/shenandoah/shenandoahPhaseTimings.hpp"
#include "gc/shenandoah/shenandoahGeneration.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.inline.hpp"
#include "gc/shenandoah/shenandoahOldGeneration.hpp"
#include "gc/shenandoah/shenandoahRootProcessor.hpp"
#include "gc/shenandoah/shenandoahScanRemembered.inline.hpp"
#include "gc/shenandoah/shenandoahTaskqueue.inline.hpp"
#include "gc/shenandoah/shenandoahUtils.hpp"
#include "gc/shenandoah/shenandoahVerifier.hpp"
#include "gc/shenandoah/shenandoahYoungGeneration.hpp"
#include "memory/allocation.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/compressedOops.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/threads.hpp"
#include "utilities/align.hpp"

// Avoid name collision on verify_oop (defined in macroAssembler_arm.hpp)
#ifdef verify_oop
#undef verify_oop
#endif

static bool is_instance_ref_klass(Klass* k) {
  return k->is_instance_klass() && InstanceKlass::cast(k)->reference_type() != REF_NONE;
}

class ShenandoahIgnoreReferenceDiscoverer : public ReferenceDiscoverer {
public:
  virtual bool discover_reference(oop obj, ReferenceType type) {
    return true;
  }
};

class ShenandoahVerifyOopClosure : public BasicOopIterateClosure {
private:
  const char* _phase;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahVerifierStack* _stack;
  ShenandoahHeap* _heap;
  MarkBitMap* _map;
  ShenandoahLivenessData* _ld;
  void* _interior_loc;
  oop _loc;
  ShenandoahGeneration* _generation;

public:
  ShenandoahVerifyOopClosure(ShenandoahVerifierStack* stack, MarkBitMap* map, ShenandoahLivenessData* ld,
                             const char* phase, ShenandoahVerifier::VerifyOptions options) :
    _phase(phase),
    _options(options),
    _stack(stack),
    _heap(ShenandoahHeap::heap()),
    _map(map),
    _ld(ld),
    _interior_loc(nullptr),
    _loc(nullptr),
    _generation(nullptr) {
    if (options._verify_marked == ShenandoahVerifier::_verify_marked_complete_except_references ||
        options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty ||
        options._verify_marked == ShenandoahVerifier::_verify_marked_disable) {
      set_ref_discoverer_internal(new ShenandoahIgnoreReferenceDiscoverer());
    }

    if (_heap->mode()->is_generational()) {
      _generation = _heap->gc_generation();
      assert(_generation != nullptr, "Expected active generation in this mode");
      shenandoah_assert_generations_reconciled();
    }
  }

private:
  void check(ShenandoahAsserts::SafeLevel level, oop obj, bool test, const char* label) {
    if (!test) {
      ShenandoahAsserts::print_failure(level, obj, _interior_loc, _loc, _phase, label, __FILE__, __LINE__);
    }
  }

  template <class T>
  void do_oop_work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (is_instance_ref_klass(obj->klass())) {
        obj = ShenandoahForwarding::get_forwardee(obj);
      }
      // Single threaded verification can use faster non-atomic stack and bitmap
      // methods.
      //
      // For performance reasons, only fully verify non-marked field values.
      // We are here when the host object for *p is already marked.
      if (in_generation(obj) && _map->par_mark(obj)) {
        verify_oop_at(p, obj);
        _stack->push(ShenandoahVerifierTask(obj));
      }
    }
  }

  bool in_generation(oop obj) {
    if (_generation == nullptr) {
      return true;
    }

    ShenandoahHeapRegion* region = _heap->heap_region_containing(obj);
    return _generation->contains(region);
  }

  void verify_oop(oop obj) {
    // Perform consistency checks with gradually decreasing safety level. This guarantees
    // that failure report would not try to touch something that was not yet verified to be
    // safe to process.
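    // The checks below therefore progress from _safe_unknown (nothing about obj is trusted yet),
    // through _safe_oop (obj itself has been verified), to _safe_all (obj and its forwardee verified).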

    check(ShenandoahAsserts::_safe_unknown, obj, _heap->is_in(obj),
          "oop must be in heap");
    check(ShenandoahAsserts::_safe_unknown, obj, is_object_aligned(obj),
          "oop must be aligned");

    ShenandoahHeapRegion *obj_reg = _heap->heap_region_containing(obj);
    Klass* obj_klass = obj->klass_or_null();

    // Verify that obj is not in dead space:
    {
      // Do this before touching obj->size()
      check(ShenandoahAsserts::_safe_unknown, obj, obj_klass != nullptr,
            "Object klass pointer should not be null");
      check(ShenandoahAsserts::_safe_unknown, obj, Metaspace::contains(obj_klass),
            "Object klass pointer must go to metaspace");

      HeapWord *obj_addr = cast_from_oop<HeapWord*>(obj);
      check(ShenandoahAsserts::_safe_unknown, obj, obj_addr < obj_reg->top(),
            "Object start should be within the region");

      if (!obj_reg->is_humongous()) {
        check(ShenandoahAsserts::_safe_unknown, obj, (obj_addr + obj->size()) <= obj_reg->top(),
              "Object end should be within the region");
      } else {
        size_t humongous_start = obj_reg->index();
        size_t humongous_end = humongous_start + (obj->size() >> ShenandoahHeapRegion::region_size_words_shift());
        for (size_t idx = humongous_start + 1; idx < humongous_end; idx++) {
          check(ShenandoahAsserts::_safe_unknown, obj, _heap->get_region(idx)->is_humongous_continuation(),
                "Humongous object is in continuation that fits it");
        }
      }

      // ------------ obj is safe at this point --------------

      check(ShenandoahAsserts::_safe_oop, obj, obj_reg->is_active(),
            "Object should be in active region");

      switch (_options._verify_liveness) {
        case ShenandoahVerifier::_verify_liveness_disable:
          // skip
          break;
        case ShenandoahVerifier::_verify_liveness_complete:
          Atomic::add(&_ld[obj_reg->index()], (uint) obj->size(), memory_order_relaxed);
          // fallthrough for fast failure for un-live regions:
        case ShenandoahVerifier::_verify_liveness_conservative:
          check(ShenandoahAsserts::_safe_oop, obj, obj_reg->has_live() ||
                (obj_reg->is_old() && _heap->gc_generation()->is_young()),
                "Object must belong to region with live data");
          shenandoah_assert_generations_reconciled();
          break;
        default:
          assert(false, "Unhandled liveness verification");
      }
    }

    oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);

    ShenandoahHeapRegion* fwd_reg = nullptr;

    if (obj != fwd) {
      check(ShenandoahAsserts::_safe_oop, obj, _heap->is_in(fwd),
            "Forwardee must be in heap");
      check(ShenandoahAsserts::_safe_oop, obj, !CompressedOops::is_null(fwd),
            "Forwardee is set");
      check(ShenandoahAsserts::_safe_oop, obj, is_object_aligned(fwd),
            "Forwardee must be aligned");

      // Do this before touching fwd->size()
      Klass* fwd_klass = fwd->klass_or_null();
      check(ShenandoahAsserts::_safe_oop, obj, fwd_klass != nullptr,
            "Forwardee klass pointer should not be null");
      check(ShenandoahAsserts::_safe_oop, obj, Metaspace::contains(fwd_klass),
            "Forwardee klass pointer must go to metaspace");
      check(ShenandoahAsserts::_safe_oop, obj, obj_klass == fwd_klass,
            "Forwardee klass must match object klass");

      fwd_reg = _heap->heap_region_containing(fwd);

      // Verify that forwardee is not in the dead space:
      check(ShenandoahAsserts::_safe_oop, obj, !fwd_reg->is_humongous(),
            "Should have no humongous forwardees");

      HeapWord *fwd_addr = cast_from_oop<HeapWord *>(fwd);
      check(ShenandoahAsserts::_safe_oop, obj, fwd_addr < fwd_reg->top(),
            "Forwardee start should be within the region");
      check(ShenandoahAsserts::_safe_oop, obj, (fwd_addr + fwd->size()) <= fwd_reg->top(),
            "Forwardee end should be within the region");

      oop fwd2 = ShenandoahForwarding::get_forwardee_raw_unchecked(fwd);
      check(ShenandoahAsserts::_safe_oop, obj, (fwd == fwd2),
            "Double forwarding");
    } else {
      fwd_reg = obj_reg;
    }

    // ------------ obj and fwd are safe at this point --------------
    switch (_options._verify_marked) {
      case ShenandoahVerifier::_verify_marked_disable:
        // skip
        break;
      case ShenandoahVerifier::_verify_marked_incomplete:
        check(ShenandoahAsserts::_safe_all, obj, _heap->marking_context()->is_marked(obj),
              "Must be marked in incomplete bitmap");
        break;
      case ShenandoahVerifier::_verify_marked_complete:
        check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked(obj),
              "Must be marked in complete bitmap");
        break;
      case ShenandoahVerifier::_verify_marked_complete_except_references:
      case ShenandoahVerifier::_verify_marked_complete_satb_empty:
        check(ShenandoahAsserts::_safe_all, obj, _heap->complete_marking_context()->is_marked(obj),
              "Must be marked in complete bitmap, except j.l.r.Reference referents");
        break;
      default:
        assert(false, "Unhandled mark verification");
    }

    switch (_options._verify_forwarded) {
      case ShenandoahVerifier::_verify_forwarded_disable:
        // skip
        break;
      case ShenandoahVerifier::_verify_forwarded_none: {
        check(ShenandoahAsserts::_safe_all, obj, (obj == fwd),
              "Should not be forwarded");
        break;
      }
      case ShenandoahVerifier::_verify_forwarded_allow: {
        if (obj != fwd) {
          check(ShenandoahAsserts::_safe_all, obj, obj_reg != fwd_reg,
                "Forwardee should be in another region");
        }
        break;
      }
      default:
        assert(false, "Unhandled forwarding verification");
    }

    switch (_options._verify_cset) {
      case ShenandoahVerifier::_verify_cset_disable:
        // skip
        break;
      case ShenandoahVerifier::_verify_cset_none:
        check(ShenandoahAsserts::_safe_all, obj, !_heap->in_collection_set(obj),
              "Should not have references to collection set");
        break;
      case ShenandoahVerifier::_verify_cset_forwarded:
        if (_heap->in_collection_set(obj)) {
          check(ShenandoahAsserts::_safe_all, obj, (obj != fwd),
                "Object in collection set, should have forwardee");
        }
        break;
      default:
        assert(false, "Unhandled cset verification");
    }

  }

public:
  /**
   * Verify object with known interior reference.
   * @param p interior reference where the object is referenced from; can be off-heap
   * @param obj verified object
   */
  template <class T>
  void verify_oop_at(T* p, oop obj) {
    _interior_loc = p;
    verify_oop(obj);
    _interior_loc = nullptr;
  }

  /**
   * Verify object without known interior reference.
   * Useful when picking up the object at known offset in heap,
   * but without knowing what objects reference it.
   * @param obj verified object
   */
  void verify_oop_standalone(oop obj) {
    _interior_loc = nullptr;
    verify_oop(obj);
    _interior_loc = nullptr;
  }

  /**
   * Verify oop fields from this object.
   * @param obj host object for verified fields
   */
  void verify_oops_from(oop obj) {
    _loc = obj;
    obj->oop_iterate(this);
    _loc = nullptr;
  }

  void do_oop(oop* p) override { do_oop_work(p); }
  void do_oop(narrowOop* p) override { do_oop_work(p); }
};

// This closure computes the amounts of used, committed, and garbage memory and the number of regions contained within
// a subset (e.g. the young generation or old generation) of the total heap.
class ShenandoahCalculateRegionStatsClosure : public ShenandoahHeapRegionClosure {
private:
  size_t _used, _committed, _garbage, _regions, _humongous_waste, _trashed_regions;
public:
  ShenandoahCalculateRegionStatsClosure() :
    _used(0), _committed(0), _garbage(0), _regions(0), _humongous_waste(0), _trashed_regions(0) {};

  void heap_region_do(ShenandoahHeapRegion* r) override {
    _used += r->used();
    _garbage += r->garbage();
    _committed += r->is_committed() ? ShenandoahHeapRegion::region_size_bytes() : 0;
    if (r->is_humongous()) {
      _humongous_waste += r->free();
    }
    if (r->is_trash()) {
      _trashed_regions++;
    }
    _regions++;
    log_debug(gc)("ShenandoahCalculateRegionStatsClosure: adding " SIZE_FORMAT " for %s Region " SIZE_FORMAT ", yielding: " SIZE_FORMAT,
                  r->used(), (r->is_humongous() ? "humongous" : "regular"), r->index(), _used);
  }

  size_t used() const { return _used; }
  size_t committed() const { return _committed; }
  size_t garbage() const { return _garbage; }
  size_t regions() const { return _regions; }
  size_t waste() const { return _humongous_waste; }

  // span is the total memory affiliated with these stats (some of which is in use and other is available)
  size_t span() const { return _regions * ShenandoahHeapRegion::region_size_bytes(); }
  size_t non_trashed_span() const { return (_regions - _trashed_regions) * ShenandoahHeapRegion::region_size_bytes(); }
};

class ShenandoahGenerationStatsClosure : public ShenandoahHeapRegionClosure {
public:
  ShenandoahCalculateRegionStatsClosure old;
  ShenandoahCalculateRegionStatsClosure young;
  ShenandoahCalculateRegionStatsClosure global;

  void heap_region_do(ShenandoahHeapRegion* r) override {
    switch (r->affiliation()) {
      case FREE:
        return;
      case YOUNG_GENERATION:
        young.heap_region_do(r);
        global.heap_region_do(r);
        break;
      case OLD_GENERATION:
        old.heap_region_do(r);
        global.heap_region_do(r);
        break;
      default:
        ShouldNotReachHere();
    }
  }

  static void log_usage(ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
    log_debug(gc)("Safepoint verification: %s verified usage: " SIZE_FORMAT "%s, recorded usage: " SIZE_FORMAT "%s",
                  generation->name(),
                  byte_size_in_proper_unit(generation->used()), proper_unit_for_byte_size(generation->used()),
                  byte_size_in_proper_unit(stats.used()), proper_unit_for_byte_size(stats.used()));
  }

  static void validate_usage(const bool adjust_for_padding,
                             const char* label, ShenandoahGeneration* generation, ShenandoahCalculateRegionStatsClosure& stats) {
    ShenandoahHeap* heap = ShenandoahHeap::heap();
    size_t generation_used = generation->used();
    size_t generation_used_regions = generation->used_regions();
    if (adjust_for_padding && (generation->is_young() || generation->is_global())) {
      size_t pad = heap->old_generation()->get_pad_for_promote_in_place();
      generation_used += pad;
    }

    guarantee(stats.used() == generation_used,
              "%s: generation (%s) used size must be consistent: generation-used: " PROPERFMT ", regions-used: " PROPERFMT,
              label, generation->name(), PROPERFMTARGS(generation_used), PROPERFMTARGS(stats.used()));

    guarantee(stats.regions() == generation_used_regions,
              "%s: generation (%s) used regions (" SIZE_FORMAT ") must equal regions that are in use (" SIZE_FORMAT ")",
              label, generation->name(), generation->used_regions(), stats.regions());

    size_t generation_capacity = generation->max_capacity();
    guarantee(stats.non_trashed_span() <= generation_capacity,
              "%s: generation (%s) size spanned by regions (" SIZE_FORMAT ") * region size (" PROPERFMT
              ") must not exceed current capacity (" PROPERFMT ")",
              label, generation->name(), stats.regions(), PROPERFMTARGS(ShenandoahHeapRegion::region_size_bytes()),
              PROPERFMTARGS(generation_capacity));

    size_t humongous_waste = generation->get_humongous_waste();
    guarantee(stats.waste() == humongous_waste,
              "%s: generation (%s) humongous waste must be consistent: generation: " PROPERFMT ", regions: " PROPERFMT,
              label, generation->name(), PROPERFMTARGS(humongous_waste), PROPERFMTARGS(stats.waste()));
  }
};

class ShenandoahVerifyHeapRegionClosure : public ShenandoahHeapRegionClosure {
private:
  ShenandoahHeap* _heap;
  const char* _phase;
  ShenandoahVerifier::VerifyRegions _regions;
public:
  ShenandoahVerifyHeapRegionClosure(const char* phase, ShenandoahVerifier::VerifyRegions regions) :
    _heap(ShenandoahHeap::heap()),
    _phase(phase),
    _regions(regions) {};

  void print_failure(ShenandoahHeapRegion* r, const char* label) {
    ResourceMark rm;

    ShenandoahMessageBuffer msg("Shenandoah verification failed; %s: %s\n\n", _phase, label);

    stringStream ss;
    r->print_on(&ss);
    msg.append("%s", ss.as_string());

    report_vm_error(__FILE__, __LINE__, msg.buffer());
  }

  void verify(ShenandoahHeapRegion* r, bool test, const char* msg) {
    if (!test) {
      print_failure(r, msg);
    }
  }

  void heap_region_do(ShenandoahHeapRegion* r) override {
    switch (_regions) {
      case ShenandoahVerifier::_verify_regions_disable:
        break;
      case ShenandoahVerifier::_verify_regions_notrash:
        verify(r, !r->is_trash(),
               "Should not have trash regions");
        break;
      case ShenandoahVerifier::_verify_regions_nocset:
        verify(r, !r->is_cset(),
               "Should not have cset regions");
        break;
      case ShenandoahVerifier::_verify_regions_notrash_nocset:
        verify(r, !r->is_trash(),
               "Should not have trash regions");
        verify(r, !r->is_cset(),
               "Should not have cset regions");
        break;
      default:
        ShouldNotReachHere();
    }

    verify(r, r->capacity() == ShenandoahHeapRegion::region_size_bytes(),
           "Capacity should match region size");

    verify(r, r->bottom() <= r->top(),
           "Region top should not be less than bottom");

    verify(r, r->bottom() <= _heap->marking_context()->top_at_mark_start(r),
           "Region TAMS should not be less than bottom");

    verify(r, _heap->marking_context()->top_at_mark_start(r) <= r->top(),
           "Complete TAMS should not be larger than top");

    verify(r, r->get_live_data_bytes() <= r->capacity(),
           "Live data cannot be larger than capacity");

    verify(r, r->garbage() <= r->capacity(),
           "Garbage cannot be larger than capacity");

    verify(r, r->used() <= r->capacity(),
           "Used cannot be larger than capacity");

    verify(r, r->get_shared_allocs() <= r->capacity(),
           "Shared alloc count should not be larger than capacity");

    verify(r, r->get_tlab_allocs() <= r->capacity(),
           "TLAB alloc count should not be larger than capacity");

    verify(r, r->get_gclab_allocs() <= r->capacity(),
           "GCLAB alloc count should not be larger than capacity");

    verify(r, r->get_plab_allocs() <= r->capacity(),
           "PLAB alloc count should not be larger than capacity");

    verify(r, r->get_shared_allocs() + r->get_tlab_allocs() + r->get_gclab_allocs() + r->get_plab_allocs() == r->used(),
           "Accurate accounting: shared + TLAB + GCLAB + PLAB = used");

    verify(r, !r->is_empty() || !r->has_live(),
           "Empty regions should not have live data");

    verify(r, r->is_cset() == _heap->collection_set()->is_in(r),
           "Transitional: region flags and collection set agree");
  }
};

class ShenandoahVerifierReachableTask : public WorkerTask {
private:
  const char* _label;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahHeap* _heap;
  ShenandoahLivenessData* _ld;
  MarkBitMap* _bitmap;
  volatile size_t _processed;

public:
  ShenandoahVerifierReachableTask(MarkBitMap* bitmap,
                                  ShenandoahLivenessData* ld,
                                  const char* label,
                                  ShenandoahVerifier::VerifyOptions options) :
    WorkerTask("Shenandoah Verifier Reachable Objects"),
    _label(label),
    _options(options),
    _heap(ShenandoahHeap::heap()),
    _ld(ld),
    _bitmap(bitmap),
    _processed(0) {};

  size_t processed() const {
    return _processed;
  }

  void work(uint worker_id) override {
    ResourceMark rm;
    ShenandoahVerifierStack stack;

    // On level 2, we need to only check the roots once.
    // On level 3, we want to check the roots, and seed the local stack.
    // It is a lesser evil to accept multiple root scans at level 3, because
    // extended parallelism would buy us out.
    if (((ShenandoahVerifyLevel == 2) && (worker_id == 0))
        || (ShenandoahVerifyLevel >= 3)) {
      ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
                                    ShenandoahMessageBuffer("%s, Roots", _label),
                                    _options);
      if (_heap->unload_classes()) {
        ShenandoahRootVerifier::strong_roots_do(&cl);
      } else {
        ShenandoahRootVerifier::roots_do(&cl);
      }
    }

    size_t processed = 0;

    if (ShenandoahVerifyLevel >= 3) {
      ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
                                    ShenandoahMessageBuffer("%s, Reachable", _label),
                                    _options);
      while (!stack.is_empty()) {
        processed++;
        ShenandoahVerifierTask task = stack.pop();
        cl.verify_oops_from(task.obj());
      }
    }

    Atomic::add(&_processed, processed, memory_order_relaxed);
  }
};

class ShenandoahVerifyNoIncompleteSatbBuffers : public ThreadClosure {
public:
  void do_thread(Thread* thread) override {
    SATBMarkQueue& queue = ShenandoahThreadLocalData::satb_mark_queue(thread);
    if (!is_empty(queue)) {
      fatal("All SATB buffers should have been flushed during mark");
    }
  }
private:
  bool is_empty(SATBMarkQueue& queue) {
    return queue.buffer() == nullptr || queue.index() == queue.capacity();
  }
};

class ShenandoahVerifierMarkedRegionTask : public WorkerTask {
private:
  const char* _label;
  ShenandoahVerifier::VerifyOptions _options;
  ShenandoahHeap *_heap;
  MarkBitMap* _bitmap;
  ShenandoahLivenessData* _ld;
  volatile size_t _claimed;
  volatile size_t _processed;
  ShenandoahGeneration* _generation;

public:
  ShenandoahVerifierMarkedRegionTask(MarkBitMap* bitmap,
                                     ShenandoahLivenessData* ld,
                                     const char* label,
                                     ShenandoahVerifier::VerifyOptions options) :
    WorkerTask("Shenandoah Verifier Marked Objects"),
    _label(label),
    _options(options),
    _heap(ShenandoahHeap::heap()),
    _bitmap(bitmap),
    _ld(ld),
    _claimed(0),
    _processed(0),
    _generation(nullptr) {
    if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
      Threads::change_thread_claim_token();
    }

    if (_heap->mode()->is_generational()) {
      _generation = _heap->gc_generation();
      assert(_generation != nullptr, "Expected active generation in this mode.");
      shenandoah_assert_generations_reconciled();
    }
  };

  size_t processed() {
    return Atomic::load(&_processed);
  }

  void work(uint worker_id) override {
    if (_options._verify_marked == ShenandoahVerifier::_verify_marked_complete_satb_empty) {
      ShenandoahVerifyNoIncompleteSatbBuffers verify_satb;
      Threads::possibly_parallel_threads_do(true, &verify_satb);
    }

    ShenandoahVerifierStack stack;
    ShenandoahVerifyOopClosure cl(&stack, _bitmap, _ld,
                                  ShenandoahMessageBuffer("%s, Marked", _label),
                                  _options);

    while (true) {
      size_t v = Atomic::fetch_then_add(&_claimed, 1u, memory_order_relaxed);
      if (v < _heap->num_regions()) {
        ShenandoahHeapRegion* r = _heap->get_region(v);
        if (!in_generation(r)) {
          continue;
        }

        if (!r->is_humongous() && !r->is_trash()) {
          work_regular(r, stack, cl);
        } else if (r->is_humongous_start()) {
          work_humongous(r, stack, cl);
        }
      } else {
        break;
      }
    }
  }

  bool in_generation(ShenandoahHeapRegion* r) {
    return _generation == nullptr || _generation->contains(r);
  }

  virtual void work_humongous(ShenandoahHeapRegion *r, ShenandoahVerifierStack& stack, ShenandoahVerifyOopClosure& cl) {
    size_t processed = 0;
    HeapWord* obj = r->bottom();
    if (_heap->complete_marking_context()->is_marked(cast_to_oop(obj))) {
      verify_and_follow(obj, stack, cl, &processed);
    }
    Atomic::add(&_processed, processed, memory_order_relaxed);
  }

  virtual void work_regular(ShenandoahHeapRegion *r, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl) {
    size_t processed = 0;
    ShenandoahMarkingContext* ctx = _heap->complete_marking_context();
    HeapWord* tams = ctx->top_at_mark_start(r);

    // Bitmaps, before TAMS
    if (tams > r->bottom()) {
      HeapWord* start = r->bottom();
      HeapWord* addr = ctx->get_next_marked_addr(start, tams);

      while (addr < tams) {
        verify_and_follow(addr, stack, cl, &processed);
        addr += 1;
        if (addr < tams) {
          addr = ctx->get_next_marked_addr(addr, tams);
        }
      }
    }

    // Size-based, after TAMS
    {
      HeapWord* limit = r->top();
      HeapWord* addr = tams;

      while (addr < limit) {
        verify_and_follow(addr, stack, cl, &processed);
        addr += cast_to_oop(addr)->size();
      }
    }

    Atomic::add(&_processed, processed, memory_order_relaxed);
  }

  void verify_and_follow(HeapWord *addr, ShenandoahVerifierStack &stack, ShenandoahVerifyOopClosure &cl, size_t *processed) {
    if (!_bitmap->par_mark(addr)) return;

    // Verify the object itself:
    oop obj = cast_to_oop(addr);
    cl.verify_oop_standalone(obj);

    // Verify everything reachable from that object too, hopefully realizing
    // everything was already marked, and never touching further:
    if (!is_instance_ref_klass(obj->klass())) {
      cl.verify_oops_from(obj);
      (*processed)++;
    }
    while (!stack.is_empty()) {
      ShenandoahVerifierTask task = stack.pop();
      cl.verify_oops_from(task.obj());
      (*processed)++;
    }
  }
};

class VerifyThreadGCState : public ThreadClosure {
private:
  const char* const _label;
  char const _expected;

public:
  VerifyThreadGCState(const char* label, char expected) : _label(label), _expected(expected) {}
  void do_thread(Thread* t) override {
    char actual = ShenandoahThreadLocalData::gc_state(t);
    if (!verify_gc_state(actual, _expected)) {
      fatal("%s: Thread %s: expected gc-state %d, actual %d", _label, t->name(), _expected, actual);
    }
  }

  static bool verify_gc_state(char actual, char expected) {
    // Old generation marking is allowed in all states.
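    // In generational mode, old-gen marking can run concurrently with any young-collection phase,
    // so ignore the OLD_MARKING and aggregate MARKING bits when comparing against the expected state.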
    if (ShenandoahHeap::heap()->mode()->is_generational()) {
      return ((actual & ~(ShenandoahHeap::OLD_MARKING | ShenandoahHeap::MARKING)) == expected);
    } else {
      assert((actual & ShenandoahHeap::OLD_MARKING) == 0, "Should not mark old in non-generational mode");
      return (actual == expected);
    }
  }
};

void ShenandoahVerifier::verify_at_safepoint(const char* label,
                                             VerifyRememberedSet remembered,
                                             VerifyForwarded forwarded, VerifyMarked marked,
                                             VerifyCollectionSet cset,
                                             VerifyLiveness liveness, VerifyRegions regions,
                                             VerifySize sizeness,
                                             VerifyGCState gcstate) {
  guarantee(ShenandoahSafepoint::is_at_shenandoah_safepoint(), "only when nothing else happens");
  guarantee(ShenandoahVerify, "only when enabled, and bitmap is initialized in ShenandoahHeap::initialize");

  ShenandoahHeap::heap()->propagate_gc_state_to_java_threads();

  // Avoid side-effect of changing workers' active thread count, but bypass concurrent/parallel protocol check
  ShenandoahPushWorkerScope verify_worker_scope(_heap->workers(), _heap->max_workers(), false /*bypass check*/);

  log_info(gc,start)("Verify %s, Level " INTX_FORMAT, label, ShenandoahVerifyLevel);

  // GC state checks
  {
    char expected = -1;
    bool enabled;
    switch (gcstate) {
      case _verify_gcstate_disable:
        enabled = false;
        break;
      case _verify_gcstate_forwarded:
        enabled = true;
        expected = ShenandoahHeap::HAS_FORWARDED;
        break;
      case _verify_gcstate_evacuation:
        enabled = true;
        expected = ShenandoahHeap::EVACUATION;
        if (!_heap->is_stw_gc_in_progress()) {
          // Only concurrent GC sets this.
          expected |= ShenandoahHeap::WEAK_ROOTS;
        }
        break;
      case _verify_gcstate_updating:
        enabled = true;
        expected = ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::UPDATEREFS;
        break;
      case _verify_gcstate_stable:
        enabled = true;
        expected = ShenandoahHeap::STABLE;
        break;
      case _verify_gcstate_stable_weakroots:
        enabled = true;
        expected = ShenandoahHeap::STABLE;
        if (!_heap->is_stw_gc_in_progress()) {
          // Only concurrent GC sets this.
          expected |= ShenandoahHeap::WEAK_ROOTS;
        }
        break;
      default:
        enabled = false;
        assert(false, "Unhandled gc-state verification");
    }

    if (enabled) {
      char actual = _heap->gc_state();

      bool is_marking = (actual & ShenandoahHeap::MARKING);
      bool is_marking_young_or_old = (actual & (ShenandoahHeap::YOUNG_MARKING | ShenandoahHeap::OLD_MARKING));
      assert(is_marking == is_marking_young_or_old, "MARKING iff (YOUNG_MARKING or OLD_MARKING), gc_state is: %x", actual);

      // Old generation marking is allowed in all states.
      if (!VerifyThreadGCState::verify_gc_state(actual, expected)) {
        fatal("%s: Global gc-state: expected %d, actual %d", label, expected, actual);
      }

      VerifyThreadGCState vtgcs(label, expected);
      Threads::java_threads_do(&vtgcs);
    }
  }

  // Deactivate barriers temporarily: Verifier wants plain heap accesses
  ShenandoahGCStateResetter resetter;

  // Heap size checks
  {
    ShenandoahHeapLocker lock(_heap->lock());

    ShenandoahCalculateRegionStatsClosure cl;
    _heap->heap_region_iterate(&cl);
    size_t heap_used;
    if (_heap->mode()->is_generational() && (sizeness == _verify_size_adjusted_for_padding)) {
      // Prior to evacuation, regular regions that are to be evacuated in place are padded to prevent further allocations
      heap_used = _heap->used() + _heap->old_generation()->get_pad_for_promote_in_place();
    } else if (sizeness != _verify_size_disable) {
      heap_used = _heap->used();
    }
    if (sizeness != _verify_size_disable) {
      guarantee(cl.used() == heap_used,
                "%s: heap used size must be consistent: heap-used = " SIZE_FORMAT "%s, regions-used = " SIZE_FORMAT "%s",
                label,
                byte_size_in_proper_unit(heap_used), proper_unit_for_byte_size(heap_used),
                byte_size_in_proper_unit(cl.used()), proper_unit_for_byte_size(cl.used()));
    }
    size_t heap_committed = _heap->committed();
    guarantee(cl.committed() == heap_committed,
              "%s: heap committed size must be consistent: heap-committed = " SIZE_FORMAT "%s, regions-committed = " SIZE_FORMAT "%s",
              label,
              byte_size_in_proper_unit(heap_committed), proper_unit_for_byte_size(heap_committed),
              byte_size_in_proper_unit(cl.committed()), proper_unit_for_byte_size(cl.committed()));
  }

  log_debug(gc)("Safepoint verification finished heap usage verification");

  ShenandoahGeneration* generation;
  if (_heap->mode()->is_generational()) {
    generation = _heap->gc_generation();
    guarantee(generation != nullptr, "Need to know which generation to verify.");
    shenandoah_assert_generations_reconciled();
  } else {
    generation = nullptr;
  }

  if (generation != nullptr) {
    ShenandoahHeapLocker lock(_heap->lock());

    switch (remembered) {
      case _verify_remembered_disable:
        break;
      case _verify_remembered_before_marking:
        log_debug(gc)("Safepoint verification of remembered set at mark");
        verify_rem_set_before_mark();
        break;
      case _verify_remembered_before_updating_references:
        log_debug(gc)("Safepoint verification of remembered set at update ref");
        verify_rem_set_before_update_ref();
        break;
      case _verify_remembered_after_full_gc:
        log_debug(gc)("Safepoint verification of remembered set after full gc");
        verify_rem_set_after_full_gc();
        break;
      default:
        fatal("Unhandled remembered set verification mode");
    }

    ShenandoahGenerationStatsClosure cl;
    _heap->heap_region_iterate(&cl);

    if (LogTarget(Debug, gc)::is_enabled()) {
      ShenandoahGenerationStatsClosure::log_usage(_heap->old_generation(),    cl.old);
      ShenandoahGenerationStatsClosure::log_usage(_heap->young_generation(),  cl.young);
      ShenandoahGenerationStatsClosure::log_usage(_heap->global_generation(), cl.global);
    }
    if (sizeness == _verify_size_adjusted_for_padding) {
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
      ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->young_generation(), cl.young);
      ShenandoahGenerationStatsClosure::validate_usage(true, label, _heap->global_generation(), cl.global);
    } else if (sizeness == _verify_size_exact) {
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->old_generation(), cl.old);
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->young_generation(), cl.young);
      ShenandoahGenerationStatsClosure::validate_usage(false, label, _heap->global_generation(), cl.global);
    }
    // else: sizeness must equal _verify_size_disable
  }

  log_debug(gc)("Safepoint verification finished remembered set verification");

  // Internal heap region checks
  if (ShenandoahVerifyLevel >= 1) {
    ShenandoahVerifyHeapRegionClosure cl(label, regions);
    if (generation != nullptr) {
      generation->heap_region_iterate(&cl);
    } else {
      _heap->heap_region_iterate(&cl);
    }
  }

  log_debug(gc)("Safepoint verification finished heap region closure verification");

  OrderAccess::fence();

  if (UseTLAB) {
    _heap->labs_make_parsable();
  }

  // Allocate temporary bitmap for storing marking wavefront:
  _verification_bit_map->clear();

  // Allocate temporary array for storing liveness data
  ShenandoahLivenessData* ld = NEW_C_HEAP_ARRAY(ShenandoahLivenessData, _heap->num_regions(), mtGC);
  Copy::fill_to_bytes((void*)ld, _heap->num_regions()*sizeof(ShenandoahLivenessData), 0);

  const VerifyOptions& options = ShenandoahVerifier::VerifyOptions(forwarded, marked, cset, liveness, regions, gcstate);

  // Steps 1-2. Scan root set to get initial reachable set. Finish walking the reachable heap.
  // This verifies what application can see, since it only cares about reachable objects.
  size_t count_reachable = 0;
  if (ShenandoahVerifyLevel >= 2) {
    ShenandoahVerifierReachableTask task(_verification_bit_map, ld, label, options);
    _heap->workers()->run_task(&task);
    count_reachable = task.processed();
  }

  log_debug(gc)("Safepoint verification finished getting initial reachable set");

  // Step 3. Walk marked objects. Marked objects might be unreachable. This verifies what collector,
  // not the application, can see during the region scans. There is no reason to process the objects
  // that were already verified, e.g. those marked in verification bitmap. There is interaction with TAMS:
  // before TAMS, we verify the bitmaps, if available; after TAMS, we walk until the top(). It mimics
  // what marked_object_iterate is doing, without calling into that optimized (and possibly incorrect)
  // version

  size_t count_marked = 0;
  if (ShenandoahVerifyLevel >= 4 &&
      (marked == _verify_marked_complete ||
       marked == _verify_marked_complete_except_references ||
       marked == _verify_marked_complete_satb_empty)) {
    guarantee(_heap->marking_context()->is_complete(), "Marking context should be complete");
    ShenandoahVerifierMarkedRegionTask task(_verification_bit_map, ld, label, options);
    _heap->workers()->run_task(&task);
    count_marked = task.processed();
  } else {
    guarantee(ShenandoahVerifyLevel < 4 || marked == _verify_marked_incomplete || marked == _verify_marked_disable, "Should be");
  }

  log_debug(gc)("Safepoint verification finished walking marked objects");

  // Step 4. Verify accumulated liveness data, if needed. Only reliable if verification level includes
  // marked objects.

  if (ShenandoahVerifyLevel >= 4 && marked == _verify_marked_complete && liveness == _verify_liveness_complete) {
    for (size_t i = 0; i < _heap->num_regions(); i++) {
      ShenandoahHeapRegion* r = _heap->get_region(i);
      if (generation != nullptr && !generation->contains(r)) {
        continue;
      }

      juint verf_live = 0;
      if (r->is_humongous()) {
        // For humongous objects, test if start region is marked live, and if so,
        // all humongous regions in that chain have live data equal to their "used".
        juint start_live = Atomic::load(&ld[r->humongous_start_region()->index()]);
        if (start_live > 0) {
          verf_live = (juint)(r->used() / HeapWordSize);
        }
      } else {
        verf_live = Atomic::load(&ld[r->index()]);
      }

      size_t reg_live = r->get_live_data_words();
      if (reg_live != verf_live) {
        stringStream ss;
        r->print_on(&ss);
        fatal("%s: Live data should match: region-live = " SIZE_FORMAT ", verifier-live = " UINT32_FORMAT "\n%s",
              label, reg_live, verf_live, ss.freeze());
      }
    }
  }

  log_debug(gc)("Safepoint verification finished accumulation of liveness data");


  log_info(gc)("Verify %s, Level " INTX_FORMAT " (" SIZE_FORMAT " reachable, " SIZE_FORMAT " marked)",
               label, ShenandoahVerifyLevel, count_reachable, count_marked);

  FREE_C_HEAP_ARRAY(ShenandoahLivenessData, ld);
}

void ShenandoahVerifier::verify_generic(VerifyOption vo) {
  verify_at_safepoint(
          "Generic Verification",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // conservatively allow forwarded
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_disable,        // cset may be inconsistent
          _verify_liveness_disable,    // no reliable liveness data
          _verify_regions_disable,     // no reliable region data
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_disable      // no data about gcstate
  );
}

void ShenandoahVerifier::verify_before_concmark() {
  verify_at_safepoint(
          "Before Mark",
          _verify_remembered_before_marking,
                                       // verify read-only remembered set from bottom() to top()
          _verify_forwarded_none,      // UR should have fixed up
          _verify_marked_disable,      // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_none,           // UR should have fixed this
          _verify_liveness_disable,    // no reliable liveness data
          _verify_regions_notrash,     // no trash regions
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // there are no forwarded objects
  );
}

void ShenandoahVerifier::verify_after_concmark() {
  verify_at_safepoint(
          "After Mark",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_none,      // no forwarded references
          _verify_marked_complete_satb_empty,
                                       // bitmaps as precise as we can get, except dangling j.l.r.Refs
          _verify_cset_none,           // no references to cset anymore
          _verify_liveness_complete,   // liveness data must be complete here
          _verify_regions_disable,     // trash regions not yet recycled
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable_weakroots  // heap is still stable, weakroots are in progress
  );
}

void ShenandoahVerifier::verify_before_evacuation() {
  verify_at_safepoint(
          "Before Evacuation",
          _verify_remembered_disable,                // do not verify remembered set
          _verify_forwarded_none,                    // no forwarded references
          _verify_marked_complete_except_references, // walk over marked objects too
          _verify_cset_disable,                      // non-forwarded references to cset expected
          _verify_liveness_complete,                 // liveness data must be complete here
          _verify_regions_disable,                   // trash regions not yet recycled
          _verify_size_adjusted_for_padding,         // expect generation and heap sizes to match after adjustments
                                                     // for promote in place padding
          _verify_gcstate_stable_weakroots           // heap is still stable, weakroots are in progress
  );
}

void ShenandoahVerifier::verify_during_evacuation() {
  verify_at_safepoint(
          "During Evacuation",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // some forwarded references are allowed
          _verify_marked_disable,      // walk only roots
          _verify_cset_disable,        // some cset references are not forwarded yet
          _verify_liveness_disable,    // liveness data might be already stale after pre-evacs
          _verify_regions_disable,     // trash regions not yet recycled
          _verify_size_disable,        // we don't know how much of promote-in-place work has been completed
          _verify_gcstate_evacuation   // evacuation is in progress
  );
}

void ShenandoahVerifier::verify_after_evacuation() {
  verify_at_safepoint(
          "After Evacuation",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_allow,     // objects are still forwarded
          _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
          _verify_cset_forwarded,      // all cset refs are fully forwarded
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash,     // trash regions have been recycled already
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_forwarded    // evacuation produced some forwarded objects
  );
}

void ShenandoahVerifier::verify_before_updaterefs() {
  verify_at_safepoint(
          "Before Updating References",
          _verify_remembered_before_updating_references,  // verify read-write remembered set
          _verify_forwarded_allow,     // forwarded references allowed
          _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
          _verify_cset_forwarded,      // all cset refs are fully forwarded
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_notrash,     // trash regions have been recycled already
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_updating     // evacuation should have produced some forwarded objects
  );
}

// We have not yet cleaned up (reclaimed) the collection set
void ShenandoahVerifier::verify_after_updaterefs() {
  verify_at_safepoint(
          "After Updating References",
          _verify_remembered_disable,  // do not verify remembered set
          _verify_forwarded_none,      // no forwarded references
          _verify_marked_complete,     // bitmaps might be stale, but alloc-after-mark should be well
          _verify_cset_none,           // no cset references, all updated
          _verify_liveness_disable,    // no reliable liveness data anymore
          _verify_regions_nocset,      // no cset regions, trash regions have appeared
          _verify_size_exact,          // expect generation and heap sizes to match exactly
          _verify_gcstate_stable       // update refs had cleaned up forwarded objects
  );
}

void ShenandoahVerifier::verify_after_degenerated() {
  verify_at_safepoint(
          "After Degenerated GC",
          _verify_remembered_disable,     // do not verify remembered set
          _verify_forwarded_none,         // all objects are non-forwarded
          _verify_marked_complete,        // all objects are marked in complete bitmap
          _verify_cset_none,              // no cset references
          _verify_liveness_disable,       // no reliable liveness data anymore
          _verify_regions_notrash_nocset, // no trash, no cset
          _verify_size_exact,             // expect generation and heap sizes to match exactly
          _verify_gcstate_stable          // degenerated refs had cleaned up forwarded objects
  );
}

void ShenandoahVerifier::verify_before_fullgc() {
  verify_at_safepoint(
          "Before Full GC",
          _verify_remembered_disable,     // do not verify remembered set
          _verify_forwarded_allow,        // can have forwarded objects
          _verify_marked_disable,         // do not verify marked: lots of time wasted checking dead allocations
          _verify_cset_disable,           // cset might be foobared
          _verify_liveness_disable,       // no reliable liveness data anymore
          _verify_regions_disable,        // no reliable region data here
          _verify_size_disable,           // if we degenerate during evacuation, usage not valid: padding and deferred accounting
          _verify_gcstate_disable         // no reliable gcstate data
  );
}

void ShenandoahVerifier::verify_after_fullgc() {
  verify_at_safepoint(
          "After Full GC",
          _verify_remembered_after_full_gc,  // verify read-write remembered set
          _verify_forwarded_none,         // all objects are non-forwarded
          _verify_marked_complete,        // all objects are marked in complete bitmap
          _verify_cset_none,              // no cset references
          _verify_liveness_disable,       // no reliable liveness data anymore
          _verify_regions_notrash_nocset, // no trash, no cset
          _verify_size_exact,             // expect generation and heap sizes to match exactly
          _verify_gcstate_stable          // full gc cleaned up everything
  );
}

class ShenandoahVerifyNoForwarded : public BasicOopIterateClosure {
private:
  template <class T>
  void do_oop_work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
      if (obj != fwd) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                                         "Verify Roots", "Should not be forwarded", __FILE__, __LINE__);
      }
    }
  }

public:
  void do_oop(narrowOop* p) { do_oop_work(p); }
  void do_oop(oop* p) { do_oop_work(p); }
};

class ShenandoahVerifyInToSpaceClosure : public BasicOopIterateClosure {
private:
  template <class T>
  void do_oop_work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      ShenandoahHeap* heap = ShenandoahHeap::heap();

      if (!heap->marking_context()->is_marked_or_old(obj)) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                                         "Verify Roots In To-Space", "Should be marked", __FILE__, __LINE__);
      }

      if (heap->in_collection_set(obj)) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                                         "Verify Roots In To-Space", "Should not be in collection set", __FILE__, __LINE__);
      }

      oop fwd = ShenandoahForwarding::get_forwardee_raw_unchecked(obj);
      if (obj != fwd) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                                         "Verify Roots In To-Space", "Should not be forwarded", __FILE__, __LINE__);
      }
    }
  }

public:
  void do_oop(narrowOop* p) override { do_oop_work(p); }
  void do_oop(oop* p) override { do_oop_work(p); }
};

void ShenandoahVerifier::verify_roots_in_to_space() {
  ShenandoahVerifyInToSpaceClosure cl;
  ShenandoahRootVerifier::roots_do(&cl);
}

void ShenandoahVerifier::verify_roots_no_forwarded() {
  ShenandoahVerifyNoForwarded cl;
  ShenandoahRootVerifier::roots_do(&cl);
}

template<typename Scanner>
class ShenandoahVerifyRemSetClosure : public BasicOopIterateClosure {
protected:
  ShenandoahGenerationalHeap* const _heap;
  Scanner* const _scanner;
  const char* _message;

public:
  // Argument distinguishes between initial mark or start of update refs verification.
  explicit ShenandoahVerifyRemSetClosure(Scanner* scanner, const char* message) :
    _heap(ShenandoahGenerationalHeap::heap()),
    _scanner(scanner),
    _message(message) {}

  template<class T>
  inline void work(T* p) {
    T o = RawAccess<>::oop_load(p);
    if (!CompressedOops::is_null(o)) {
      oop obj = CompressedOops::decode_not_null(o);
      if (_heap->is_in_young(obj) && !_scanner->is_card_dirty((HeapWord*) p)) {
        ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, p, nullptr,
                                         _message, "clean card should be dirty", __FILE__, __LINE__);
      }
    }
  }

  void do_oop(narrowOop* p) override { work(p); }
  void do_oop(oop* p) override { work(p); }
};

ShenandoahMarkingContext* ShenandoahVerifier::get_marking_context_for_old() {
  shenandoah_assert_generations_reconciled();
  if (_heap->old_generation()->is_mark_complete() || _heap->gc_generation()->is_global()) {
    return _heap->complete_marking_context();
  }
  return nullptr;
}

template<typename Scanner>
void ShenandoahVerifier::help_verify_region_rem_set(Scanner* scanner, ShenandoahHeapRegion* r, ShenandoahMarkingContext* ctx,
                                                    HeapWord* registration_watermark, const char* message) {
  ShenandoahVerifyRemSetClosure<Scanner> check_interesting_pointers(scanner, message);
  HeapWord* from = r->bottom();
  HeapWord* obj_addr = from;
  if (r->is_humongous_start()) {
    oop obj = cast_to_oop(obj_addr);
    if ((ctx == nullptr) || ctx->is_marked(obj)) {
      // For humongous objects, the typical object is an array, so the following checks may be overkill
      // For regular objects (not object arrays), if the card holding the start of the object is dirty,
      // we do not need to verify that cards spanning interesting pointers within this object are dirty.
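      // Object arrays are always iterated: their element references may span many cards, and each of
      // those cards has to be dirty on its own, regardless of the card covering the array header.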
      if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
        obj->oop_iterate(&check_interesting_pointers);
      }
      // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered
    }
    // else, this humongous object is not live so no need to verify its internal pointers

    if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
      ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
                                       "object not properly registered", __FILE__, __LINE__);
    }
  } else if (!r->is_humongous()) {
    HeapWord* top = r->top();
    while (obj_addr < top) {
      oop obj = cast_to_oop(obj_addr);
      // ctx->is_marked() returns true if mark bit set or if obj above TAMS.
      if ((ctx == nullptr) || ctx->is_marked(obj)) {
        // For regular objects (not object arrays), if the card holding the start of the object is dirty,
        // we do not need to verify that cards spanning interesting pointers within this object are dirty.
        if (!scanner->is_card_dirty(obj_addr) || obj->is_objArray()) {
          obj->oop_iterate(&check_interesting_pointers);
        }
        // else, object's start is marked dirty and obj is not an objArray, so any interesting pointers are covered

        if ((obj_addr < registration_watermark) && !scanner->verify_registration(obj_addr, ctx)) {
          ShenandoahAsserts::print_failure(ShenandoahAsserts::_safe_all, obj, obj_addr, nullptr, message,
                                           "object not properly registered", __FILE__, __LINE__);
        }
        obj_addr += obj->size();
      } else {
        // This object is not live so we don't verify dirty cards contained therein
        HeapWord* tams = ctx->top_at_mark_start(r);
        obj_addr = ctx->get_next_marked_addr(obj_addr, tams);
      }
    }
  }
}

class ShenandoahWriteTableScanner {
private:
  ShenandoahScanRemembered* _scanner;
public:
  explicit ShenandoahWriteTableScanner(ShenandoahScanRemembered* scanner) : _scanner(scanner) {}

  bool is_card_dirty(HeapWord* obj_addr) {
    return _scanner->is_write_card_dirty(obj_addr);
  }

  bool verify_registration(HeapWord* obj_addr, ShenandoahMarkingContext* ctx) {
    return _scanner->verify_registration(obj_addr, ctx);
  }
};

// Assure that the remembered set has a dirty card everywhere there is an interesting pointer.
// This examines the read_card_table between bottom() and top() since all PLABs are retired
// before the safepoint for init_mark. Actually, we retire them before update-references and don't
// restore them until the start of evacuation.
void ShenandoahVerifier::verify_rem_set_before_mark() {
  shenandoah_assert_safepoint();
  shenandoah_assert_generational();

  ShenandoahMarkingContext* ctx = get_marking_context_for_old();
  ShenandoahOldGeneration* old_generation = _heap->old_generation();

  log_debug(gc)("Verifying remembered set at %s mark", old_generation->is_doing_mixed_evacuations() ? "mixed" : "young");
"mixed" : "young"); 1368 1369 ShenandoahScanRemembered* scanner = old_generation->card_scan(); 1370 for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) { 1371 ShenandoahHeapRegion* r = _heap->get_region(i); 1372 if (r->is_old() && r->is_active()) { 1373 help_verify_region_rem_set(scanner, r, ctx, r->end(), "Verify init-mark remembered set violation"); 1374 } 1375 } 1376 } 1377 1378 void ShenandoahVerifier::verify_rem_set_after_full_gc() { 1379 shenandoah_assert_safepoint(); 1380 shenandoah_assert_generational(); 1381 1382 ShenandoahWriteTableScanner scanner(ShenandoahGenerationalHeap::heap()->old_generation()->card_scan()); 1383 for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) { 1384 ShenandoahHeapRegion* r = _heap->get_region(i); 1385 if (r->is_old() && !r->is_cset()) { 1386 help_verify_region_rem_set(&scanner, r, nullptr, r->top(), "Remembered set violation at end of Full GC"); 1387 } 1388 } 1389 } 1390 1391 // Assure that the remember set has a dirty card everywhere there is an interesting pointer. Even though 1392 // the update-references scan of remembered set only examines cards up to update_watermark, the remembered 1393 // set should be valid through top. This examines the write_card_table between bottom() and top() because 1394 // all PLABS are retired immediately before the start of update refs. 1395 void ShenandoahVerifier::verify_rem_set_before_update_ref() { 1396 shenandoah_assert_safepoint(); 1397 shenandoah_assert_generational(); 1398 1399 ShenandoahMarkingContext* ctx = get_marking_context_for_old(); 1400 ShenandoahWriteTableScanner scanner(_heap->old_generation()->card_scan()); 1401 for (size_t i = 0, n = _heap->num_regions(); i < n; ++i) { 1402 ShenandoahHeapRegion* r = _heap->get_region(i); 1403 if (r->is_old() && !r->is_cset()) { 1404 help_verify_region_rem_set(&scanner, r, ctx, r->get_update_watermark(), "Remembered set violation at init-update-references"); 1405 } 1406 } 1407 } 1408 1409 void ShenandoahVerifier::verify_before_rebuilding_free_set() { 1410 ShenandoahGenerationStatsClosure cl; 1411 _heap->heap_region_iterate(&cl); 1412 1413 ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->old_generation(), cl.old); 1414 ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->young_generation(), cl.young); 1415 ShenandoahGenerationStatsClosure::validate_usage(false, "Before free set rebuild", _heap->global_generation(), cl.global); 1416 } --- EOF ---