/*
 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "cds/archiveBuilder.hpp"
#include "cds/cdsConfig.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciMetadata.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "compiler/compileTask.hpp"
#include "logging/logStream.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/method.hpp"
#include "oops/methodCounters.hpp"
#include "oops/methodData.hpp"
#include "oops/recompilationSchedule.hpp"
#include "oops/trainingData.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomicAccess.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "utilities/growableArray.hpp"

TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;

MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

void TrainingData::initialize() {
  // This is a nop if training modes are not enabled.
  if (have_data() || need_data()) {
    // The data structures that we have do not currently support iterative training. So you cannot replay
    // and train at the same time. Going forward we may want to adjust iteration/search to enable that.
    guarantee(have_data() != need_data(), "Iterative training is not supported");
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
}

static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}

void TrainingData::verify() {
  if (TrainingData::have_data() && !TrainingData::assembling_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify(/*verify_dep_counter*/true);
      }
    });
  }
  if (TrainingData::need_data()) {
    TrainingDataLocker l;
    training_data_set()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        // During the training run, init deps tracking is not set up yet,
        // so don't verify it.
        mtd->verify(/*verify_dep_counter*/false);
      }
    });
  }
}

MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // The cached value is stored in MethodCounters, and the following are the
  // possible states:
  // 1. The cached value is method_training_data_sentinel().
  //    This is the initial state and needs a full lookup.
  // 2. The cached value is null.
  //    The lookup failed the last time. If we don't plan to create a new TD object,
  //    i.e. null_if_not_found == true, then just return null.
  // 3. The cached value is not null.
  //    Return it; the value of training_data_lookup_failed doesn't matter.
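  // Note that the cache can only be consulted when MethodCounters already exist;
  // if they are missing and use_cache is set, we build them below so that the
  // result of the full lookup can be cached for subsequent calls.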
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
      return mtd;
    }
    if (null_if_not_found && mtd == nullptr) {
      assert(mtd == nullptr, "No training data found");
      return nullptr;
    }
  } else if (use_cache) {
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data()) {
    td = lookup_archived_training_data(&key);
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else {
      mtd = nullptr;
    }
    // Cache the pointer to the MTD in MethodCounters for faster lookup (it could be null if not found).
    method->init_training_data(mtd);
  }

  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (!null_if_not_found) {
        KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
        if (ktd == nullptr) {
          return nullptr; // allocation failure
        }
        mtd = MethodTrainingData::allocate(method(), ktd);
        if (mtd == nullptr) {
          return nullptr; // allocation failure
        }
        td = training_data_set()->install(mtd);
        assert(td == mtd, "");
      } else {
        mtd = nullptr;
      }
    } else {
      mtd = td->as_MethodTrainingData();
    }
    // Cache the pointer to the MTD in MethodCounters for faster lookup (it could be null if not found).
    method->init_training_data(mtd);
  }

  return mtd;
}

void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    _klass->print_on(st, true);
    st->print(".");
    name()->print_symbol_on(st);
    signature()->print_symbol_on(st);
  }
  if (name_only) {
    return;
  }
  if (!has_holder()) {
    st->print("[SYM]");
  }
  if (_level_mask) {
    st->print(" LM%d", _level_mask);
  }
  st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
}

CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    // Keep only the most recent (highest compile id) top-level compilation record per level.
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}

void CompileTrainingData::dec_init_deps_left_release(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd != nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

  uint init_deps_left1 = AtomicAccess::sub(&_init_deps_left, 1);
  if (log.is_enabled()) {
    uint init_deps_left2 = compute_init_deps_left();
    log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
    ktd->print_on(&log, true);
  }
}

uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->defined_by_other_loaders()) {
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left;
        }
      }
    }
  }
  return left;
}

void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
  _method->print_on(st, true);
  st->print("#%dL%d", _compile_id, _level);
  if (name_only) {
    return;
  }
  if (_init_deps.length() > 0) {
    if (_init_deps_left > 0) {
      st->print(" udeps=%d", _init_deps_left);
    }
    for (int i = 0, len = _init_deps.length(); i < len; i++) {
      st->print(" dep:");
      _init_deps.at(i)->print_on(st, true);
    }
  }
}

void CompileTrainingData::notice_inlined_method(CompileTask* task,
                                                const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::make(method);
  if (mtd != nullptr) {
    mtd->notice_compilation(task->comp_level(), true);
  }
}

void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  ciMetadata* md = nullptr;
  if (what->is_object()) {
    md = what->as_object()->klass();
  } else if (what->is_metadata()) {
    md = what->as_metadata();
  }
  if (md != nullptr && md->is_loaded() && md->is_instance_klass()) {
    InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
    KlassTrainingData* ktd = KlassTrainingData::make(ik);
    if (ktd == nullptr) {
      // Allocation failure or snapshot in progress
      return;
    }
    // This JIT task is (probably) requesting that ik be initialized,
    // so add it to my _init_deps list.
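    // Take the TrainingDataLocker so the _init_deps list is updated consistently
    // with respect to other threads recording dependencies concurrently.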
    TrainingDataLocker l;
    add_init_dep(ktd);
  }
}

void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  ClassLoaderData* loader_data = nullptr;
  if (_holder != nullptr) {
    loader_data = _holder->class_loader_data();
  } else {
    loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
  }
  _comp_deps.prepare(loader_data);
}

void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  klass()->prepare(visitor);
  if (has_holder()) {
    _final_counters = holder()->method_counters();
    _final_profile = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}

void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  ClassLoaderData* loader_data = _method->klass()->class_loader_data();
  _init_deps.prepare(loader_data);
  _ci_records.prepare(loader_data);
}

KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}

void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated:            st->print("[A]"); break;
      case InstanceKlass::loaded:               st->print("[D]"); break;
      case InstanceKlass::linked:               st->print("[L]"); break;
      case InstanceKlass::being_initialized:    st->print("[i]"); break;
      case InstanceKlass::fully_initialized:                      break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}

KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  assert(klass != nullptr, "");
  // The OopHandle constructor will allocate a handle. We don't need to ever release it,
  // so we don't preserve the handle object.
  OopHandle handle(Universe::vm_global(), klass->java_mirror());
  _holder = klass;
  assert(holder() == klass, "");
}

void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left_release(this);
  }
  holder()->set_has_init_deps_processed();
}

void TrainingData::init_dumptime_table(TRAPS) {
  precond((!assembling_data() && !need_data()) || need_data() != assembling_data());
  if (assembling_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });
  }

  RecompilationSchedule::prepare(CHECK);

  if (AOTVerifyTrainingData) {
    TrainingData::verify();
  }
}

void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (_dumptime_training_data_dictionary != nullptr) {
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
    }
  }
  RecompilationSchedule::iterate_roots(it);
}

void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
      writer.add(hash, delta);
    }
    writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");
  }
}

void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}

void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded();
    if (CDSConfig::is_at_aot_safepoint()) {
      // Check for AOT exclusion only at the AOT safepoint.
      is_excluded |= SystemDictionaryShared::should_be_excluded(holder());
    }
    if (is_excluded) {
      ResourceMark rm;
      log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}

void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    // Check for AOT exclusion only at the AOT safepoint.
    if (CDSConfig::is_at_aot_safepoint() && SystemDictionaryShared::should_be_excluded(holder()->method_holder())) {
      log_debug(aot, training)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(aot, training)("Stale MDO for %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}

void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}

void MethodTrainingData::verify(bool verify_dep_counter) {
  iterate_compiles([&](CompileTrainingData* ctd) {
    ctd->verify(verify_dep_counter);
  });
}

void CompileTrainingData::verify(bool verify_dep_counter) {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->defined_by_other_loaders()) {
      LogStreamHandle(Warning, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }

  if (verify_dep_counter) {
    int init_deps_left1 = init_deps_left_acquire();
    int init_deps_left2 = compute_init_deps_left();

    bool invariant = (init_deps_left1 >= init_deps_left2);
    if (!invariant) {
      print_on(tty);
      tty->cr();
    }
    guarantee(invariant, "init deps invariant violation: %d >= %d", init_deps_left1, init_deps_left2);
  }
}

void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}

void TrainingData::serialize(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
  RecompilationSchedule::serialize(soc);
}

class TrainingDataPrinter : StackObj {
  outputStream* _st;
  int _index;
public:
  TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
  void do_value(TrainingData* td) {
    const char* type = (td->is_KlassTrainingData()   ? "K" :
                        td->is_MethodTrainingData()  ? "M" :
                        td->is_CompileTrainingData() ? "C" : "?");
    _st->print("%4d: %p %s ", _index++, td, type);
    td->print_on(_st);
    _st->cr();
    if (td->is_KlassTrainingData()) {
      td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_MethodTrainingData()) {
      td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_CompileTrainingData()) {
      // ?
    }
  }
};

void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}

void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}

void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}

bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::in_aot_cache(k->meta());
}

uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}

TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
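  // If any key component lives outside the AOT cache, its hash cannot be computed
  // the same way it was at dump time, so the lookup simply reports a miss.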
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}

template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}

void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}

void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}

void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}