/*
 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "cds/cdsConfig.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciMetadata.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "compiler/compileTask.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/method.hpp"
#include "oops/methodCounters.hpp"
#include "oops/trainingData.hpp"
#include "runtime/arguments.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "utilities/growableArray.hpp"

TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;

MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

void TrainingData::initialize() {
  // this is a nop if training modes are not enabled
  if (have_data() || need_data()) {
    // Data structures that we have do not currently support iterative training. So you cannot replay
    // and train at the same time. Going forward we may want to adjust iteration/search to enable that.
    guarantee(have_data() != need_data(), "Iterative training is not supported");
    TrainingDataLocker::initialize();
  }
}

static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}

void TrainingData::verify() {
  if (TrainingData::have_data() && !TrainingData::assembling_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify(/*verify_dep_counter*/true);
      }
    });
  }
  if (TrainingData::need_data()) {
    TrainingDataLocker l;
    training_data_set()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        // During the training run init deps tracking is not set up yet,
        // don't verify it.
        mtd->verify(/*verify_dep_counter*/false);
      }
    });
  }
}

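// Look up the MethodTrainingData for 'method'. With have_data() the archived dictionary is
// consulted; with need_data() the in-memory training set is searched, and on a miss a new
// record is created and installed unless null_if_not_found is set. The result (possibly null)
// is cached in the MethodCounters so later calls can skip the dictionary lookup.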
MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // The cached value is stored in MethodCounters and the following are the
  // possible states:
  // 1. Cached value is method_training_data_sentinel().
  //    This is the initial state and needs a full lookup.
  // 2. Cached value is null.
  //    Lookup failed the last time; if we don't plan to create a new TD object,
  //    i.e. null_if_not_found == true, then just return null.
  // 3. Cached value is not null.
  //    Return it; the value of training_data_lookup_failed doesn't matter.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
      return mtd;
    }
    if (null_if_not_found && mtd == nullptr) {
      assert(mtd == nullptr, "No training data found");
      return nullptr;
    }
  } else if (use_cache) {
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data()) {
    td = lookup_archived_training_data(&key);
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else {
      mtd = nullptr;
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (!null_if_not_found) {
        KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
        if (ktd == nullptr) {
          return nullptr; // allocation failure
        }
        mtd = MethodTrainingData::allocate(method(), ktd);
        if (mtd == nullptr) {
          return nullptr; // allocation failure
        }
        td = training_data_set()->install(mtd);
        assert(td == mtd, "");
      } else {
        mtd = nullptr;
      }
    } else {
      mtd = td->as_MethodTrainingData();
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  return mtd;
}

void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    _klass->print_on(st, true);
    st->print(".");
    name()->print_symbol_on(st);
    signature()->print_symbol_on(st);
  }
  if (name_only) {
    return;
  }
  if (!has_holder()) {
    st->print("[SYM]");
  }
  if (_level_mask) {
    st->print(" LM%d", _level_mask);
  }
  st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
}

CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}


void CompileTrainingData::dec_init_deps_left_release(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd != nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

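  // The decrement must be atomic: TrainingDataLocker is not a real lock when we only replay
  // archived data (see KlassTrainingData::notice_fully_initialized), so several classes may
  // report their initialization concurrently.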
  uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);

  if (log.is_enabled()) {
    uint init_deps_left2 = compute_init_deps_left();
    log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
    ktd->print_on(&log, true);
  }
}

uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->defined_by_other_loaders()) {
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left;
        }
      }
    }
  }
  return left;
}

void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
  _method->print_on(st, true);
  st->print("#%dL%d", _compile_id, _level);
  if (name_only) {
    return;
  }
  if (_init_deps.length() > 0) {
    if (_init_deps_left > 0) {
      st->print(" udeps=%d", _init_deps_left);
    }
    for (int i = 0, len = _init_deps.length(); i < len; i++) {
      st->print(" dep:");
      _init_deps.at(i)->print_on(st, true);
    }
  }
}

void CompileTrainingData::notice_inlined_method(CompileTask* task,
                                                const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::make(method);
  if (mtd != nullptr) {
    mtd->notice_compilation(task->comp_level(), true);
  }
}

void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  CompileTask* task = env->task();
  assert(task != nullptr, "");
  Method* method = task->method();
  InstanceKlass* compiling_klass = method->method_holder();
  if (what->is_metadata()) {
    ciMetadata* md = what->as_metadata();
    if (md->is_loaded() && md->is_instance_klass()) {
      ciInstanceKlass* cik = md->as_instance_klass();

      if (cik->is_initialized()) {
        InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
        KlassTrainingData* ktd = KlassTrainingData::make(ik);
        if (ktd == nullptr) {
          // Allocation failure or snapshot in progress
          return;
        }
        // This JIT task is (probably) requesting that ik be initialized,
        // so add him to my _init_deps list.
        TrainingDataLocker l;
        add_init_dep(ktd);
      }
    }
  }
}

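// The prepare() pass runs while the dumptime table is being built: each node is visited at
// most once (the Visitor guards against revisits) and its dynamically grown dependency lists
// are converted into fixed Array<T>s (see DepList<T>::prepare below) so they can be archived.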
void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  ClassLoaderData* loader_data = nullptr;
  if (_holder != nullptr) {
    loader_data = _holder->class_loader_data();
  } else {
    loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
  }
  _comp_deps.prepare(loader_data);
}

void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  klass()->prepare(visitor);
  if (has_holder()) {
    _final_counters = holder()->method_counters();
    _final_profile = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}

void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  ClassLoaderData* loader_data = _method->klass()->class_loader_data();
  _init_deps.prepare(loader_data);
  _ci_records.prepare(loader_data);
}

KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}

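// The one-letter suffix reflects the holder's init state: [A]llocated, loa[D]ed, [L]inked,
// be[i]ng_initialized or initialization [E]rror; a fully initialized holder prints no suffix,
// and a trailing "I" marks an interface. "[SYM]" means the record has no holder (symbolic key).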
void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated:            st->print("[A]"); break;
      case InstanceKlass::loaded:               st->print("[D]"); break;
      case InstanceKlass::linked:               st->print("[L]"); break;
      case InstanceKlass::being_initialized:    st->print("[i]"); break;
      case InstanceKlass::fully_initialized:                      break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}

KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  assert(klass != nullptr, "");
  // The OopHandle constructor will allocate a handle. We don't need to ever release it so we
  // don't preserve the handle object.
  OopHandle handle(Universe::vm_global(), klass->java_mirror());
  _holder = klass;
  assert(holder() == klass, "");
}

void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left_release(this);
  }
  holder()->set_has_init_deps_processed();
}

void TrainingData::init_dumptime_table(TRAPS) {
  precond((!assembling_data() && !need_data()) || need_data() != assembling_data());
  if (assembling_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });
  }

  if (AOTVerifyTrainingData) {
    TrainingData::verify();
  }
}

void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (_dumptime_training_data_dictionary != nullptr) {
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
    }
  }
}

void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      for (int j = i + 1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
      writer.add(hash, delta);
    }
    writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");
  }
}

void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
}

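// The cleanup() pass drops records whose classes or methods are excluded from the archive
// (or never got loaded): such records lose their holder and get an empty key, and
// cleanup_training_data() above then compacts them out of the dumptime dictionary.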
void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
    if (is_excluded) {
      ResourceMark rm;
      log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}

void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) {
      log_debug(aot, training)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(aot, training)("Stale MDO for %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}

void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}

void MethodTrainingData::verify(bool verify_dep_counter) {
  iterate_compiles([&](CompileTrainingData* ctd) {
    ctd->verify(verify_dep_counter);
  });
}

void CompileTrainingData::verify(bool verify_dep_counter) {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->defined_by_other_loaders()) {
      LogStreamHandle(Warning, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }

  if (verify_dep_counter) {
    int init_deps_left1 = init_deps_left_acquire();
    int init_deps_left2 = compute_init_deps_left();

    bool invariant = (init_deps_left1 >= init_deps_left2);
    if (!invariant) {
      print_on(tty);
      tty->cr();
    }
    guarantee(invariant, "init deps invariant violation: %d >= %d", init_deps_left1, init_deps_left2);
  }
}

void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}

void TrainingData::serialize(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
}

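// Helper for print_archived_training_data_on(): prints every record with a running index and
// a one-letter kind tag (K = klass, M = method, C = compile), followed by the compile records
// hanging off klass and method entries.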
class TrainingDataPrinter : StackObj {
  outputStream* _st;
  int _index;
public:
  TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
  void do_value(TrainingData* td) {
    const char* type = (td->is_KlassTrainingData()   ? "K" :
                        td->is_MethodTrainingData()  ? "M" :
                        td->is_CompileTrainingData() ? "C" : "?");
    _st->print("%4d: %p %s ", _index++, td, type);
    td->print_on(_st);
    _st->cr();
    if (td->is_KlassTrainingData()) {
      td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw(" C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_MethodTrainingData()) {
      td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw(" C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_CompileTrainingData()) {
      // ?
    }
  }
};

void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
}

void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}

void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}

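// A key can be hashed into the CDS dictionary only if its Metadata component is null
// (a purely symbolic key) or already lives in the AOT cache; see cds_hash() below.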
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::in_aot_cache(k->meta());
}

uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}

TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData() && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}

template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}

void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}

void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}

void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}