1 /* 2 * Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 * 23 */ 24 25 #include <cds/archiveBuilder.hpp> 26 #include <classfile/systemDictionaryShared.hpp> 27 #include <compiler/compileBroker.hpp> 28 #include "precompiled.hpp" 29 #include "ci/ciEnv.hpp" 30 #include "ci/ciMetadata.hpp" 31 #include "cds/archiveBuilder.hpp" 32 #include "cds/cdsConfig.hpp" 33 #include "cds/metaspaceShared.hpp" 34 #include "cds/methodDataDictionary.hpp" 35 #include "cds/runTimeClassInfo.hpp" 36 #include "classfile/classLoaderData.hpp" 37 #include "classfile/compactHashtable.hpp" 38 #include "classfile/javaClasses.hpp" 39 #include "classfile/symbolTable.hpp" 40 #include "classfile/systemDictionaryShared.hpp" 41 #include "compiler/compileTask.hpp" 42 #include "memory/metadataFactory.hpp" 43 #include "memory/metaspaceClosure.hpp" 44 #include "memory/resourceArea.hpp" 45 #include "oops/fieldStreams.inline.hpp" 46 #include "oops/method.hpp" 47 #include "oops/methodCounters.hpp" 48 #include "oops/recompilationSchedule.hpp" 49 #include "oops/trainingData.hpp" 50 #include "runtime/arguments.hpp" 51 #include "runtime/fieldDescriptor.inline.hpp" 52 #include "runtime/javaThread.inline.hpp" 53 #include "runtime/jniHandles.inline.hpp" 54 #include "runtime/os.hpp" 55 #include "utilities/growableArray.hpp" 56 #include "utilities/xmlstream.hpp" 57 58 TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff); 59 TrainingDataDictionary TrainingData::_archived_training_data_dictionary; 60 TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping; 61 GrowableArrayCHeap<DumpTimeTrainingDataInfo, mtClassShared>* TrainingData::_dumptime_training_data_dictionary = nullptr; 62 int TrainingData::TrainingDataLocker::_lock_mode; 63 volatile bool TrainingData::TrainingDataLocker::_snapshot = false; 64 65 MethodTrainingData::MethodTrainingData() { 66 assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS"); 67 } 68 69 KlassTrainingData::KlassTrainingData() { 70 
assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS"); 71 } 72 73 CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) { 74 assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS"); 75 } 76 77 void TrainingData::initialize() { 78 // this is a nop if training modes are not enabled 79 if (have_data() || need_data()) { 80 TrainingDataLocker::initialize(); 81 } 82 RecompilationSchedule::initialize(); 83 } 84 85 static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) { 86 guarantee(TrainingData::Key::can_compute_cds_hash(k), ""); 87 TrainingData* td1 = TrainingData::lookup_archived_training_data(k); 88 guarantee(td == td1, ""); 89 } 90 91 void TrainingData::verify() { 92 if (TrainingData::have_data()) { 93 archived_training_data_dictionary()->iterate([&](TrainingData* td) { 94 if (td->is_KlassTrainingData()) { 95 KlassTrainingData* ktd = td->as_KlassTrainingData(); 96 if (ktd->has_holder() && ktd->holder()->is_loaded()) { 97 Key k(ktd->holder()); 98 verify_archived_entry(td, &k); 99 } 100 ktd->verify(); 101 } else if (td->is_MethodTrainingData()) { 102 MethodTrainingData* mtd = td->as_MethodTrainingData(); 103 if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) { 104 Key k(mtd->holder()); 105 verify_archived_entry(td, &k); 106 } 107 mtd->verify(); 108 } else if (td->is_CompileTrainingData()) { 109 td->as_CompileTrainingData()->verify(); 110 } 111 }); 112 } 113 } 114 115 MethodTrainingData* MethodTrainingData::make(const methodHandle& method, 116 bool null_if_not_found) { 117 MethodTrainingData* mtd = nullptr; 118 if (!have_data() && !need_data()) { 119 return mtd; 120 } 121 // Try grabbing the cached value first. 
122 MethodCounters* mcs = method->method_counters(); 123 if (mcs != nullptr) { 124 mtd = mcs->method_training_data(); 125 if (mtd != nullptr) { 126 return mtd; 127 } 128 } else { 129 mcs = Method::build_method_counters(Thread::current(), method()); 130 } 131 132 KlassTrainingData* holder = KlassTrainingData::make(method->method_holder(), null_if_not_found); 133 if (holder == nullptr) { 134 return nullptr; // allocation failure 135 } 136 Key key(method()); 137 TrainingData* td = have_data()? lookup_archived_training_data(&key) : nullptr; 138 if (td != nullptr) { 139 mtd = td->as_MethodTrainingData(); 140 method->init_training_data(mtd); // Cache the pointer for next time. 141 return mtd; 142 } else { 143 TrainingDataLocker l; 144 td = training_data_set()->find(&key); 145 if (td == nullptr && null_if_not_found) { 146 return nullptr; 147 } 148 if (td != nullptr) { 149 mtd = td->as_MethodTrainingData(); 150 method->init_training_data(mtd); // Cache the pointer for next time. 151 return mtd; 152 } 153 } 154 assert(td == nullptr && mtd == nullptr && !null_if_not_found, "Should return if have result"); 155 KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder()); 156 if (ktd != nullptr) { 157 TrainingDataLocker l; 158 td = training_data_set()->find(&key); 159 if (td == nullptr) { 160 mtd = MethodTrainingData::allocate(method(), ktd); 161 if (mtd == nullptr) { 162 return nullptr; // allocation failure 163 } 164 td = training_data_set()->install(mtd); 165 assert(td == mtd, ""); 166 } else { 167 mtd = td->as_MethodTrainingData(); 168 } 169 method->init_training_data(mtd); 170 } 171 return mtd; 172 } 173 174 void MethodTrainingData::print_on(outputStream* st, bool name_only) const { 175 _klass->print_on(st, true); 176 st->print("."); 177 name()->print_symbol_on(st); 178 signature()->print_symbol_on(st); 179 if (name_only) { 180 return; 181 } 182 if (!has_holder()) { 183 st->print("[SYM]"); 184 } 185 if (_level_mask) { 186 st->print(" LM%d", _level_mask); 187 
} 188 st->print(" mc=%p mdo=%p", _final_counters, _final_profile); 189 } 190 191 CompileTrainingData* CompileTrainingData::make(CompileTask* task) { 192 int level = task->comp_level(); 193 int compile_id = task->compile_id(); 194 Thread* thread = Thread::current(); 195 methodHandle m(thread, task->method()); 196 MethodTrainingData* mtd = MethodTrainingData::make(m); 197 if (mtd == nullptr) { 198 return nullptr; // allocation failure 199 } 200 mtd->notice_compilation(level); 201 202 TrainingDataLocker l; 203 CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id); 204 if (ctd != nullptr) { 205 if (mtd->_last_toplevel_compiles[level - 1] != nullptr) { 206 if (mtd->_last_toplevel_compiles[level - 1]->compile_id() < compile_id) { 207 mtd->_last_toplevel_compiles[level - 1]->clear_init_deps(); 208 mtd->_last_toplevel_compiles[level - 1] = ctd; 209 mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level); 210 } 211 } else { 212 mtd->_last_toplevel_compiles[level - 1] = ctd; 213 mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level); 214 } 215 } 216 return ctd; 217 } 218 219 220 void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) { 221 LogStreamHandle(Trace, training) log; 222 if (log.is_enabled()) { 223 log.print("CTD "); print_on(&log); log.cr(); 224 log.print("KTD "); ktd->print_on(&log); log.cr(); 225 } 226 assert(ktd!= nullptr && ktd->has_holder(), ""); 227 assert(_init_deps.contains(ktd), ""); 228 assert(_init_deps_left > 0, ""); 229 230 uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1); 231 232 if (log.is_enabled()) { 233 uint init_deps_left2 = compute_init_deps_left(); 234 log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2); 235 ktd->print_on(&log, true); 236 } 237 } 238 239 uint CompileTrainingData::compute_init_deps_left(bool count_initialized) { 240 int left = 0; 241 for (int i = 0; i < _init_deps.length(); i++) { 242 KlassTrainingData* ktd = _init_deps.at(i); 243 // Ignore 
symbolic refs and already initialized classes (unless explicitly requested). 244 if (ktd->has_holder()) { 245 InstanceKlass* holder = ktd->holder(); 246 if (!ktd->holder()->is_initialized() || count_initialized) { 247 ++left; 248 } else if (holder->is_shared_unregistered_class()) { 249 Key k(holder); 250 if (!Key::can_compute_cds_hash(&k)) { 251 ++left; // FIXME: !!! init tracking doesn't work well for custom loaders !!! 252 } 253 } 254 } 255 } 256 return left; 257 } 258 259 void CompileTrainingData::print_on(outputStream* st, bool name_only) const { 260 _method->print_on(st, true); 261 st->print("#%dL%d", _compile_id, _level); 262 if (name_only) { 263 return; 264 } 265 #define MAYBE_TIME(Q, _qtime) \ 266 if (_qtime != 0) st->print(" " #Q "%.3f", _qtime) 267 MAYBE_TIME(Q, _qtime); 268 MAYBE_TIME(S, _stime); 269 MAYBE_TIME(E, _etime); 270 if (_init_deps.length() > 0) { 271 if (_init_deps_left > 0) { 272 st->print(" udeps=%d", _init_deps_left); 273 } 274 for (int i = 0, len = _init_deps.length(); i < len; i++) { 275 st->print(" dep:"); 276 _init_deps.at(i)->print_on(st, true); 277 } 278 } 279 } 280 281 void CompileTrainingData::record_compilation_queued(CompileTask* task) { 282 _qtime = tty->time_stamp().seconds(); 283 } 284 void CompileTrainingData::record_compilation_start(CompileTask* task) { 285 _stime = tty->time_stamp().seconds(); 286 } 287 void CompileTrainingData::record_compilation_end(CompileTask* task) { 288 _etime = tty->time_stamp().seconds(); 289 if (task->is_success()) { // record something about the nmethod output 290 _nm_total_size = task->nm_total_size(); 291 } 292 } 293 void CompileTrainingData::notice_inlined_method(CompileTask* task, 294 const methodHandle& method) { 295 MethodTrainingData* mtd = MethodTrainingData::make(method); 296 if (mtd != nullptr) { 297 mtd->notice_compilation(task->comp_level(), true); 298 } 299 } 300 301 void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) { 302 // A JIT is starting to look at 
class k. 303 // We could follow the queries that it is making, but it is 304 // simpler to assume, conservatively, that the JIT will 305 // eventually depend on the initialization state of k. 306 CompileTask* task = env->task(); 307 assert(task != nullptr, ""); 308 Method* method = task->method(); 309 InstanceKlass* compiling_klass = method->method_holder(); 310 if (what->is_metadata()) { 311 ciMetadata* md = what->as_metadata(); 312 if (md->is_loaded() && md->is_instance_klass()) { 313 ciInstanceKlass* cik = md->as_instance_klass(); 314 315 if (cik->is_initialized()) { 316 InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass(); 317 KlassTrainingData* ktd = KlassTrainingData::make(ik); 318 if (ktd == nullptr) { 319 // Allocation failure or snapshot in progress 320 return; 321 } 322 // This JIT task is (probably) requesting that ik be initialized, 323 // so add him to my _init_deps list. 324 TrainingDataLocker l; 325 add_init_dep(ktd); 326 } 327 } 328 } 329 } 330 331 void KlassTrainingData::prepare(Visitor& visitor) { 332 if (visitor.is_visited(this)) { 333 return; 334 } 335 visitor.visit(this); 336 ClassLoaderData* loader_data = nullptr; 337 if (_holder != nullptr) { 338 loader_data = _holder->class_loader_data(); 339 } else { 340 loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD 341 } 342 _comp_deps.prepare(loader_data); 343 } 344 345 void MethodTrainingData::prepare(Visitor& visitor) { 346 if (visitor.is_visited(this)) { 347 return; 348 } 349 visitor.visit(this); 350 klass()->prepare(visitor); 351 if (has_holder()) { 352 _final_counters = holder()->method_counters(); 353 _final_profile = holder()->method_data(); 354 assert(_final_profile == nullptr || _final_profile->method() == holder(), ""); 355 } 356 for (int i = 0; i < CompLevel_count; i++) { 357 CompileTrainingData* ctd = _last_toplevel_compiles[i]; 358 if (ctd != nullptr) { 359 ctd->prepare(visitor); 360 } 361 } 362 } 363 364 void 
CompileTrainingData::prepare(Visitor& visitor) { 365 if (visitor.is_visited(this)) { 366 return; 367 } 368 visitor.visit(this); 369 method()->prepare(visitor); 370 ClassLoaderData* loader_data = _method->klass()->class_loader_data(); 371 _init_deps.prepare(loader_data); 372 _ci_records.prepare(loader_data); 373 } 374 375 KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) { 376 Key key(holder); 377 TrainingData* td = have_data() ? lookup_archived_training_data(&key) : nullptr; 378 KlassTrainingData* ktd = nullptr; 379 if (td != nullptr) { 380 ktd = td->as_KlassTrainingData(); 381 guarantee(!ktd->has_holder() || ktd->holder() == holder, ""); 382 if (ktd->has_holder()) { 383 return ktd; 384 } 385 } 386 TrainingDataLocker l; 387 td = training_data_set()->find(&key); 388 if (td == nullptr) { 389 if (null_if_not_found) { 390 return nullptr; 391 } 392 ktd = KlassTrainingData::allocate(holder); 393 if (ktd == nullptr) { 394 return nullptr; // allocation failure 395 } 396 td = training_data_set()->install(ktd); 397 assert(ktd == td, ""); 398 } else { 399 ktd = td->as_KlassTrainingData(); 400 guarantee(ktd->holder() != nullptr, "null holder"); 401 } 402 assert(ktd != nullptr, ""); 403 guarantee(ktd->holder() == holder, ""); 404 return ktd; 405 } 406 407 void KlassTrainingData::print_on(outputStream* st, bool name_only) const { 408 name()->print_symbol_on(st); 409 if (has_holder()) { 410 switch (holder()->init_state()) { 411 case InstanceKlass::allocated: st->print("[A]"); break; 412 case InstanceKlass::loaded: st->print("[D]"); break; 413 case InstanceKlass::linked: st->print("[L]"); break; 414 case InstanceKlass::being_initialized: st->print("[i]"); break; 415 case InstanceKlass::fully_initialized: /*st->print("");*/ break; 416 case InstanceKlass::initialization_error: st->print("[E]"); break; 417 default: fatal("unknown state: %d", holder()->init_state()); 418 } 419 if (holder()->is_interface()) { 420 st->print("I"); 421 } 422 } else 
{ 423 st->print("[SYM]"); 424 } 425 if (name_only) { 426 return; 427 } 428 if (_comp_deps.length() > 0) { 429 for (int i = 0, len = _comp_deps.length(); i < len; i++) { 430 st->print(" dep:"); 431 _comp_deps.at(i)->print_on(st, true); 432 } 433 } 434 } 435 436 KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) { 437 if (holder() == klass) { 438 return; // no change to make 439 } 440 441 jobject hmj = _holder_mirror; 442 if (hmj != nullptr) { // clear out previous handle, if any 443 _holder_mirror = nullptr; 444 assert(JNIHandles::is_global_handle(hmj), ""); 445 JNIHandles::destroy_global(hmj); 446 } 447 448 // Keep the klass alive during the training run, unconditionally. 449 // 450 // FIXME: Revisit this decision; we could allow training runs to 451 // unload classes in the normal way. We might use make_weak_global 452 // instead of make_global. 453 // 454 // The data from the training run would mention the name of the 455 // unloaded class (and of its loader). Is it worth the complexity 456 // to track and then unload classes, remembering just their names? 457 458 if (klass != nullptr) { 459 Handle hm(JavaThread::current(), klass->java_mirror()); 460 hmj = JNIHandles::make_global(hm); 461 Atomic::release_store(&_holder_mirror, hmj); 462 } 463 464 Atomic::release_store(&_holder, const_cast<InstanceKlass*>(klass)); 465 assert(holder() == klass, ""); 466 } 467 468 void KlassTrainingData::notice_fully_initialized() { 469 ResourceMark rm; 470 assert(has_holder(), ""); 471 assert(holder()->is_initialized(), "wrong state: %s %s", 472 holder()->name()->as_C_string(), holder()->init_state_name()); 473 474 TrainingDataLocker l; // Not a real lock if we don't collect the data, 475 // that's why we need the atomic decrement below. 
476 for (int i = 0; i < comp_dep_count(); i++) { 477 comp_dep(i)->dec_init_deps_left(this); 478 } 479 holder()->set_has_init_deps_processed(); 480 } 481 482 void TrainingData::init_dumptime_table(TRAPS) { 483 if (!need_data()) { 484 return; 485 } 486 _dumptime_training_data_dictionary = new GrowableArrayCHeap<DumpTimeTrainingDataInfo, mtClassShared>(); 487 if (CDSConfig::is_dumping_final_static_archive()) { 488 _archived_training_data_dictionary.iterate([&](TrainingData* record) { 489 _dumptime_training_data_dictionary->append(record); 490 }); 491 } else { 492 TrainingDataLocker l; 493 TrainingDataLocker::snapshot(); 494 495 ResourceMark rm; 496 Visitor visitor(training_data_set()->size()); 497 training_data_set()->iterate_all([&](const TrainingData::Key* k, TrainingData* td) { 498 td->prepare(visitor); 499 if (!td->is_CompileTrainingData()) { 500 _dumptime_training_data_dictionary->append(td); 501 } 502 }); 503 504 if (VerifyTrainingData) { 505 training_data_set()->verify(); 506 } 507 } 508 509 RecompilationSchedule::prepare(CHECK); 510 } 511 512 #if INCLUDE_CDS 513 void TrainingData::iterate_roots(MetaspaceClosure* it) { 514 if (!need_data()) { 515 return; 516 } 517 assert(_dumptime_training_data_dictionary != nullptr, ""); 518 for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) { 519 _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it); 520 } 521 RecompilationSchedule::iterate_roots(it); 522 } 523 524 void TrainingData::dump_training_data() { 525 if (!need_data()) { 526 return; 527 } 528 write_training_data_dictionary(&_archived_training_data_dictionary_for_dumping); 529 } 530 531 void TrainingData::cleanup_training_data() { 532 if (_dumptime_training_data_dictionary != nullptr) { 533 ResourceMark rm; 534 Visitor visitor(_dumptime_training_data_dictionary->length()); 535 for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) { 536 TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data(); 537 
td->cleanup(visitor); 538 } 539 // Throw away all elements with empty keys 540 int j = 0; 541 for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) { 542 TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data(); 543 if (td->key()->is_empty()) { 544 continue; 545 } 546 if (i != j) { // no need to copy if it's the same 547 _dumptime_training_data_dictionary->at_put(j, td); 548 } 549 j++; 550 } 551 _dumptime_training_data_dictionary->trunc_to(j); 552 } 553 RecompilationSchedule::cleanup(); 554 } 555 556 void KlassTrainingData::cleanup(Visitor& visitor) { 557 if (visitor.is_visited(this)) { 558 return; 559 } 560 visitor.visit(this); 561 if (has_holder()) { 562 bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr); 563 if (is_excluded) { 564 ResourceMark rm; 565 log_debug(cds)("Cleanup KTD %s", name()->as_klass_external_name()); 566 _holder = nullptr; 567 key()->make_empty(); 568 } 569 } 570 for (int i = 0; i < _comp_deps.length(); i++) { 571 _comp_deps.at(i)->cleanup(visitor); 572 } 573 } 574 575 void MethodTrainingData::cleanup(Visitor& visitor) { 576 if (visitor.is_visited(this)) { 577 return; 578 } 579 visitor.visit(this); 580 if (has_holder()) { 581 if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) { 582 log_debug(cds)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8()); 583 if (_final_profile != nullptr && _final_profile->method() != _holder) { 584 log_warning(cds)("Stale MDO for %s::%s", name()->as_klass_external_name(), signature()->as_utf8()); 585 } 586 _holder = nullptr; 587 key()->make_empty(); 588 } 589 } 590 for (int i = 0; i < CompLevel_count; i++) { 591 CompileTrainingData* ctd = _last_toplevel_compiles[i]; 592 if (ctd != nullptr) { 593 ctd->cleanup(visitor); 594 } 595 } 596 } 597 598 void KlassTrainingData::verify() { 599 for (int i = 0; i < comp_dep_count(); i++) { 600 CompileTrainingData* ctd = 
comp_dep(i); 601 if (!ctd->_init_deps.contains(this)) { 602 print_on(tty); tty->cr(); 603 ctd->print_on(tty); tty->cr(); 604 } 605 guarantee(ctd->_init_deps.contains(this), ""); 606 } 607 } 608 609 void MethodTrainingData::verify() { 610 iterate_all_compiles([](CompileTrainingData* ctd) { 611 ctd->verify(); 612 613 int init_deps_left1 = ctd->init_deps_left(); 614 int init_deps_left2 = ctd->compute_init_deps_left(); 615 616 if (init_deps_left1 != init_deps_left2) { 617 ctd->print_on(tty); tty->cr(); 618 } 619 guarantee(init_deps_left1 == init_deps_left2, "mismatch: %d %d %d", 620 init_deps_left1, init_deps_left2, ctd->init_deps_left()); 621 }); 622 } 623 624 void CompileTrainingData::verify() { 625 for (int i = 0; i < init_dep_count(); i++) { 626 KlassTrainingData* ktd = init_dep(i); 627 if (ktd->has_holder() && ktd->holder()->is_shared_unregistered_class()) { 628 LogStreamHandle(Warning, training) log; 629 if (log.is_enabled()) { 630 ResourceMark rm; 631 log.print("CTD "); print_value_on(&log); 632 log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string()); 633 } 634 } 635 if (!ktd->_comp_deps.contains(this)) { 636 print_on(tty); tty->cr(); 637 ktd->print_on(tty); tty->cr(); 638 } 639 guarantee(ktd->_comp_deps.contains(this), ""); 640 } 641 } 642 643 void CompileTrainingData::cleanup(Visitor& visitor) { 644 if (visitor.is_visited(this)) { 645 return; 646 } 647 visitor.visit(this); 648 method()->cleanup(visitor); 649 } 650 651 void TrainingData::serialize_training_data(SerializeClosure* soc) { 652 if (soc->writing()) { 653 _archived_training_data_dictionary_for_dumping.serialize_header(soc); 654 } else { 655 _archived_training_data_dictionary.serialize_header(soc); 656 } 657 RecompilationSchedule::serialize_training_data(soc); 658 } 659 660 void TrainingData::print_archived_training_data_on(outputStream* st) { 661 st->print_cr("Archived TrainingData Dictionary"); 662 TrainingDataPrinter tdp(st); 663 TrainingDataLocker::initialize(); 664 
_archived_training_data_dictionary.iterate(&tdp); 665 RecompilationSchedule::print_archived_training_data_on(st); 666 } 667 668 void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) { 669 iter->push(const_cast<Metadata**>(&_meta)); 670 } 671 672 void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) { 673 _key.metaspace_pointers_do(iter); 674 } 675 676 bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) { 677 return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta()); 678 } 679 680 uint TrainingData::Key::cds_hash(const Key* const& k) { 681 return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta()); 682 } 683 684 void TrainingData::write_training_data_dictionary(TrainingDataDictionary* dictionary) { 685 if (!need_data()) { 686 return; 687 } 688 assert(_dumptime_training_data_dictionary != nullptr, ""); 689 CompactHashtableStats stats; 690 dictionary->reset(); 691 CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats); 692 for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) { 693 TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data(); 694 #ifdef ASSERT 695 for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) { 696 TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data(); 697 assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict"); 698 } 699 #endif // ASSERT 700 td = ArchiveBuilder::current()->get_buffered_addr(td); 701 uint hash = TrainingData::Key::cds_hash(td->key()); 702 u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td); 703 writer.add(hash, delta); 704 } 705 writer.dump(dictionary, "training data dictionary"); 706 } 707 708 size_t TrainingData::estimate_size_for_archive() { 709 if (_dumptime_training_data_dictionary != nullptr) { 710 return CompactHashtableWriter::estimate_size(_dumptime_training_data_dictionary->length()); 711 } else { 712 return 0; 713 } 
714 } 715 716 TrainingData* TrainingData::lookup_archived_training_data(const Key* k) { 717 // For this to work, all components of the key must be in shared metaspace. 718 if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) { 719 return nullptr; 720 } 721 uint hash = TrainingData::Key::cds_hash(k); 722 TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/); 723 if (td != nullptr) { 724 if ((td->is_KlassTrainingData() && td->as_KlassTrainingData()->has_holder()) || 725 (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) { 726 return td; 727 } else { 728 ShouldNotReachHere(); 729 } 730 } 731 return nullptr; 732 } 733 #endif 734 735 KlassTrainingData* TrainingData::lookup_for(InstanceKlass* ik) { 736 if (TrainingData::have_data() && ik != nullptr && ik->is_loaded()) { 737 TrainingData::Key key(ik); 738 TrainingData* td = TrainingData::lookup_archived_training_data(&key); 739 if (td != nullptr && td->is_KlassTrainingData()) { 740 return td->as_KlassTrainingData(); 741 } 742 } 743 return nullptr; 744 } 745 746 MethodTrainingData* TrainingData::lookup_for(Method* m) { 747 if (TrainingData::have_data() && m != nullptr) { 748 KlassTrainingData* holder_ktd = TrainingData::lookup_for(m->method_holder()); 749 if (holder_ktd != nullptr) { 750 TrainingData::Key key(m); 751 TrainingData* td = TrainingData::lookup_archived_training_data(&key); 752 if (td != nullptr && td->is_MethodTrainingData()) { 753 return td->as_MethodTrainingData(); 754 } 755 } 756 } 757 return nullptr; 758 } 759 760 template <typename T> 761 void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) { 762 iter->push(&_deps); 763 } 764 765 void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) { 766 log_trace(cds)("Iter(KlassTrainingData): %p", this); 767 TrainingData::metaspace_pointers_do(iter); 768 _comp_deps.metaspace_pointers_do(iter); 769 iter->push(&_holder); 770 } 771 772 
// Push every metaspace pointer embedded in this record so the CDS archive
// builder can relocate them (order mirrors the field declaration order).
void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  // _holder is declared with a different static type; cast so the closure
  // sees it as a Method** slot.
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}

void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

// Materialize the dynamic (C-heap GrowableArray) dependency list as a
// metaspace Array<T> so it can be archived.  A nop once _deps exists or if
// nothing was ever recorded.  NOTE(review): 'loader_data' is currently
// unused here — presumably kept for a future metaspace allocation variant;
// confirm before removing.
template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

// allocate() factories: return nullptr instead of allocating once the
// training-data set has been snapshotted (no further additions allowed).
KlassTrainingData* KlassTrainingData::allocate(InstanceKlass* holder) {
  assert(need_data() || have_data(), "");
  if (TrainingDataLocker::can_add()) {
    return new (mtClassShared) KlassTrainingData(holder);
  }
  return nullptr;
}

MethodTrainingData* MethodTrainingData::allocate(Method* m, KlassTrainingData* ktd) {
  assert(need_data() || have_data(), "");
  if (TrainingDataLocker::can_add()) {
    return new (mtClassShared) MethodTrainingData(m, ktd);
  }
  return nullptr;
}

CompileTrainingData* CompileTrainingData::allocate(MethodTrainingData* mtd, int level, int compile_id) {
  assert(need_data() || have_data(), "");
  if (TrainingDataLocker::can_add()) {
    return new (mtClassShared) CompileTrainingData(mtd, level, compile_id);
  }
  return nullptr;
}

// Print one archived dictionary entry: a one-letter kind tag, the record
// itself, and (for klass/method records) the compile records hanging off it.
void TrainingDataPrinter::do_value(TrainingData* td) {
#ifdef ASSERT
  // Sanity: the entry must be findable both via its own key object and via
  // a freshly-built key over the same metadata.
  TrainingData::Key key(td->key()->meta());
  assert(td == TrainingData::archived_training_data_dictionary()->lookup(td->key(), TrainingData::Key::cds_hash(td->key()), -1), "");
  assert(td == TrainingData::archived_training_data_dictionary()->lookup(&key, TrainingData::Key::cds_hash(&key), -1), "");
#endif // ASSERT

  const char* type = (td->is_KlassTrainingData()   ? "K" :
                      td->is_MethodTrainingData()  ? "M" :
                      td->is_CompileTrainingData() ? "C" : "?");
  _st->print("%4d: %p %s ", _index++, td, type);
  td->print_on(_st);
  _st->cr();
  if (td->is_KlassTrainingData()) {
    td->as_KlassTrainingData()->iterate_all_comp_deps([&](CompileTrainingData* ctd) {
      ResourceMark rm;
      _st->print_raw("  C ");
      ctd->print_on(_st);
      _st->cr();
    });
  } else if (td->is_MethodTrainingData()) {
    td->as_MethodTrainingData()->iterate_all_compiles([&](CompileTrainingData* ctd) {
      ResourceMark rm;
      _st->print_raw("  C ");
      ctd->print_on(_st);
      _st->cr();
    });
  } else if (td->is_CompileTrainingData()) {
    // ?
  }
}


#if INCLUDE_CDS
// remove_unshareable_info(): strip process-local state (JNI handles, live
// counters/MDO internals) before the record is written to the archive.
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _holder_mirror = nullptr;   // global JNI handle is meaningless in the archive
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  // Reset the counter to the full dependency count (count_initialized=true):
  // at runtime every dependency starts out uninitialized again.
  _init_deps_left = compute_init_deps_left(true);
}

#endif // INCLUDE_CDS