/*
 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "ci/ciEnv.hpp"
#include "ci/ciMetadata.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/metaspaceShared.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "compiler/compileTask.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/method.hpp"
#include "oops/methodCounters.hpp"
#include "oops/recompilationSchedule.hpp"
#include "oops/trainingData.hpp"
#include "runtime/arguments.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "utilities/growableArray.hpp"

TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;

MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

void TrainingData::initialize() {
  // This is a nop if training modes are not enabled.
  if (have_data() || need_data()) {
    // The data structures we have do not currently support iterative training, so you cannot
    // replay and train at the same time. Going forward we may want to adjust iteration/search
    // to enable that.
    guarantee(have_data() != need_data(), "Iterative training is not supported");
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
}

static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}

void TrainingData::verify() {
  if (TrainingData::have_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify();
      } else if (td->is_CompileTrainingData()) {
        td->as_CompileTrainingData()->verify();
      }
    });
  }
}

MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // The cached value is stored in MethodCounters and the following are the
  // possible states:
  // 1. Cached value is method_training_data_sentinel().
  //    This is the initial state and needs a full lookup.
  // 2. Cached value is null.
  //    Lookup failed the last time; if we don't plan to create a new TD object,
  //    i.e. null_if_not_found == true, then just return null.
  // 3. Cached value is neither null nor the sentinel.
  //    Return it; the value of training_data_lookup_failed doesn't matter.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
      return mtd;
    }
    if (null_if_not_found && mtd == nullptr) {
      assert(mtd == nullptr, "No training data found");
      return nullptr;
    }
  } else if (use_cache) {
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data()) {
    td = lookup_archived_training_data(&key);
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else {
      mtd = nullptr;
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (!null_if_not_found) {
        KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
        if (ktd == nullptr) {
          return nullptr; // allocation failure
        }
        mtd = MethodTrainingData::allocate(method(), ktd);
        if (mtd == nullptr) {
          return nullptr; // allocation failure
        }
        td = training_data_set()->install(mtd);
        assert(td == mtd, "");
      } else {
        mtd = nullptr;
      }
    } else {
      mtd = td->as_MethodTrainingData();
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  return mtd;
}

void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    _klass->print_on(st, true);
    st->print(".");
    name()->print_symbol_on(st);
    signature()->print_symbol_on(st);
  }
  if (name_only) {
    return;
  }
  if (!has_holder()) {
    st->print("[SYM]");
  }
  if (_level_mask) {
    st->print(" LM%d", _level_mask);
  }
  st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
}

CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}

void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd != nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

  uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);

  if (log.is_enabled()) {
    uint init_deps_left2 = compute_init_deps_left();
    log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
    ktd->print_on(&log, true);
  }
}

uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->defined_by_other_loaders()) {
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left;
        }
      }
    }
  }
  return left;
}

void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
  _method->print_on(st, true);
  st->print("#%dL%d", _compile_id, _level);
  if (name_only) {
    return;
  }
  if (_init_deps.length() > 0) {
    if (_init_deps_left > 0) {
      st->print(" udeps=%d", _init_deps_left);
    }
    for (int i = 0, len = _init_deps.length(); i < len; i++) {
      st->print(" dep:");
      _init_deps.at(i)->print_on(st, true);
    }
  }
}

void CompileTrainingData::notice_inlined_method(CompileTask* task,
                                                const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::make(method);
  if (mtd != nullptr) {
    mtd->notice_compilation(task->comp_level(), true);
  }
}

void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  CompileTask* task = env->task();
  assert(task != nullptr, "");
  Method* method = task->method();
  InstanceKlass* compiling_klass = method->method_holder();
  if (what->is_metadata()) {
    ciMetadata* md = what->as_metadata();
    if (md->is_loaded() && md->is_instance_klass()) {
      ciInstanceKlass* cik = md->as_instance_klass();

      if (cik->is_initialized()) {
        InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
        KlassTrainingData* ktd = KlassTrainingData::make(ik);
        if (ktd == nullptr) {
          // Allocation failure or snapshot in progress
          return;
        }
        // This JIT task is (probably) requesting that ik be initialized,
        // so add it to my _init_deps list.
        TrainingDataLocker l;
        add_init_dep(ktd);
      }
    }
  }
}

void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  ClassLoaderData* loader_data = nullptr;
  if (_holder != nullptr) {
    loader_data = _holder->class_loader_data();
  } else {
    loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
  }
  _comp_deps.prepare(loader_data);
}

void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  klass()->prepare(visitor);
  if (has_holder()) {
    _final_counters = holder()->method_counters();
    _final_profile = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}

void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  ClassLoaderData* loader_data = _method->klass()->class_loader_data();
  _init_deps.prepare(loader_data);
  _ci_records.prepare(loader_data);
}

KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}

void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated:            st->print("[A]"); break;
      case InstanceKlass::loaded:               st->print("[D]"); break;
      case InstanceKlass::linked:               st->print("[L]"); break;
      case InstanceKlass::being_initialized:    st->print("[i]"); break;
      case InstanceKlass::fully_initialized:                      break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}

KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  assert(klass != nullptr, "");
  // The OopHandle constructor will allocate a handle. We don't ever need to release it,
  // so we don't preserve the handle object.
  OopHandle handle(Universe::vm_global(), klass->java_mirror());
  _holder = klass;
  assert(holder() == klass, "");
}

void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left(this);
  }
  holder()->set_has_init_deps_processed();
}

void TrainingData::init_dumptime_table(TRAPS) {
  precond((!assembling_data() && !need_data()) || need_data() != assembling_data());
  if (assembling_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });

    if (AOTVerifyTrainingData) {
      training_data_set()->verify();
    }
  }

  RecompilationSchedule::prepare(CHECK);
}

void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (_dumptime_training_data_dictionary != nullptr) {
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
    }
  }
  RecompilationSchedule::iterate_roots(it);
}

void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
      writer.add(hash, delta);
    }
    writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");
  }
}

void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}

void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
    if (is_excluded) {
      ResourceMark rm;
      log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}

void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) {
      log_debug(aot, training)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(aot, training)("Stale MDO for %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}

void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}

void MethodTrainingData::verify() {
  iterate_compiles([](CompileTrainingData* ctd) {
    ctd->verify();

    int init_deps_left1 = ctd->init_deps_left();
    int init_deps_left2 = ctd->compute_init_deps_left();

    if (init_deps_left1 != init_deps_left2) {
      ctd->print_on(tty); tty->cr();
    }
    guarantee(init_deps_left1 == init_deps_left2, "mismatch: %d %d %d",
              init_deps_left1, init_deps_left2, ctd->init_deps_left());
  });
}

void CompileTrainingData::verify() {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->defined_by_other_loaders()) {
      LogStreamHandle(Warning, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }
}

void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}

void TrainingData::serialize(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
  RecompilationSchedule::serialize(soc);
}

class TrainingDataPrinter : StackObj {
  outputStream* _st;
  int _index;
public:
  TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
  void do_value(TrainingData* td) {
    const char* type = (td->is_KlassTrainingData()   ? "K" :
                        td->is_MethodTrainingData()  ? "M" :
                        td->is_CompileTrainingData() ? "C" : "?");
    _st->print("%4d: %p %s ", _index++, td, type);
    td->print_on(_st);
    _st->cr();
    if (td->is_KlassTrainingData()) {
      td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw(" C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_MethodTrainingData()) {
      td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw(" C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_CompileTrainingData()) {
      // ?
    }
  }
};

void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}

void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}

void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}

bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
}

uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}

TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}

template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}

void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}

void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}

void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}