/*
 * Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciMetadata.hpp"
#include "cds/archiveBuilder.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/metaspaceShared.hpp"
#include "cds/methodDataDictionary.hpp"
#include "cds/runTimeClassInfo.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileTask.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "oops/fieldStreams.inline.hpp"
#include "oops/method.hpp"
#include "oops/methodCounters.hpp"
#include "oops/recompilationSchedule.hpp"
#include "oops/trainingData.hpp"
#include "runtime/arguments.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/os.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/xmlstream.hpp"

TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;

MethodTrainingData::MethodTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

KlassTrainingData::KlassTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

void TrainingData::initialize() {
  // This is a nop if training modes are not enabled.
  if (have_data() || need_data()) {
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
}

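// Verification support: when an archived training data dictionary is present
// (have_data()), every record whose holder is still loaded must be findable
// again through its Key. verify_archived_entry() checks that round trip and
// TrainingData::verify() applies it to each entry in the dictionary.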
#if INCLUDE_CDS
static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}
#endif

void TrainingData::verify() {
#if INCLUDE_CDS
  if (TrainingData::have_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify();
      } else if (td->is_CompileTrainingData()) {
        td->as_CompileTrainingData()->verify();
      }
    });
  }
#endif
}

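// Look up (or, unless null_if_not_found, create) the MethodTrainingData for a
// method. The search order is: the pointer cached in the MethodCounters, then
// the archived dictionary (when have_data()), then the dump-time training data
// set under TrainingDataLocker. Only if all of those miss is a new record
// allocated and installed; a successful lookup is cached back on the method.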
MethodTrainingData* MethodTrainingData::make(const methodHandle& method,
                                             bool null_if_not_found) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr) {
      return mtd;
    }
  } else {
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  KlassTrainingData* holder = KlassTrainingData::make(method->method_holder(), null_if_not_found);
  if (holder == nullptr) {
    return nullptr; // allocation failure
  }
  Key key(method());
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  if (td != nullptr) {
    mtd = td->as_MethodTrainingData();
    method->init_training_data(mtd); // Cache the pointer for next time.
    return mtd;
  } else {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr && null_if_not_found) {
      return nullptr;
    }
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
      method->init_training_data(mtd); // Cache the pointer for next time.
      return mtd;
    }
  }
  assert(td == nullptr && mtd == nullptr && !null_if_not_found, "Should return if have result");
  KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
  if (ktd != nullptr) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      mtd = MethodTrainingData::allocate(method(), ktd);
      if (mtd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(mtd);
      assert(td == mtd, "");
    } else {
      mtd = td->as_MethodTrainingData();
    }
    method->init_training_data(mtd);
  }
  return mtd;
}

void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    _klass->print_on(st, true);
    st->print(".");
    name()->print_symbol_on(st);
    signature()->print_symbol_on(st);
  }
  if (name_only) {
    return;
  }
  if (!has_holder()) {
    st->print("[SYM]");
  }
  if (_level_mask) {
    st->print(" LM%d", _level_mask);
  }
  st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
}

CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    if (mtd->_last_toplevel_compiles[level - 1] != nullptr) {
      if (mtd->_last_toplevel_compiles[level - 1]->compile_id() < compile_id) {
        mtd->_last_toplevel_compiles[level - 1]->clear_init_deps();
        mtd->_last_toplevel_compiles[level - 1] = ctd;
        mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level);
      }
    } else {
      mtd->_last_toplevel_compiles[level - 1] = ctd;
      mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level);
    }
  }
  return ctd;
}

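// Initialization-dependency bookkeeping. _init_deps_left counts the classes in
// _init_deps that have not finished initializing; KlassTrainingData::
// notice_fully_initialized() calls dec_init_deps_left() on each dependent
// compile once its holder is initialized, while compute_init_deps_left()
// recomputes the count from scratch for logging, verification, and archiving.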
void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd != nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

  uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);

  if (log.is_enabled()) {
    uint init_deps_left2 = compute_init_deps_left();
    log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
    ktd->print_on(&log, true);
  }
}

uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->is_shared_unregistered_class()) {
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left; // FIXME: !!! init tracking doesn't work well for custom loaders !!!
        }
      }
    }
  }
  return left;
}

void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
  _method->print_on(st, true);
  st->print("#%dL%d", _compile_id, _level);
  if (name_only) {
    return;
  }
#define MAYBE_TIME(Q, _qtime) \
  if (_qtime != 0) st->print(" " #Q "%.3f", _qtime)
  MAYBE_TIME(Q, _qtime);
  MAYBE_TIME(S, _stime);
  MAYBE_TIME(E, _etime);
  if (_init_deps.length() > 0) {
    if (_init_deps_left > 0) {
      st->print(" udeps=%d", _init_deps_left);
    }
    for (int i = 0, len = _init_deps.length(); i < len; i++) {
      st->print(" dep:");
      _init_deps.at(i)->print_on(st, true);
    }
  }
}

void CompileTrainingData::record_compilation_queued(CompileTask* task) {
  _qtime = tty->time_stamp().seconds();
}

void CompileTrainingData::record_compilation_start(CompileTask* task) {
  _stime = tty->time_stamp().seconds();
}

void CompileTrainingData::record_compilation_end(CompileTask* task) {
  _etime = tty->time_stamp().seconds();
  if (task->is_success()) { // record something about the nmethod output
    _nm_total_size = task->nm_total_size();
  }
}

void CompileTrainingData::notice_inlined_method(CompileTask* task,
                                                const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::make(method);
  if (mtd != nullptr) {
    mtd->notice_compilation(task->comp_level(), true);
  }
}

void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  CompileTask* task = env->task();
  assert(task != nullptr, "");
  Method* method = task->method();
  InstanceKlass* compiling_klass = method->method_holder();
  if (what->is_metadata()) {
    ciMetadata* md = what->as_metadata();
    if (md->is_loaded() && md->is_instance_klass()) {
      ciInstanceKlass* cik = md->as_instance_klass();

      if (cik->is_initialized()) {
        InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
        KlassTrainingData* ktd = KlassTrainingData::make(ik);
        if (ktd == nullptr) {
          // Allocation failure or snapshot in progress
          return;
        }
        // This JIT task is (probably) requesting that ik be initialized,
        // so add it to our _init_deps list.
        TrainingDataLocker l;
        add_init_dep(ktd);
      }
    }
  }
}

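// prepare() is invoked on each record while the dump-time table is being built
// (see TrainingData::init_dumptime_table()). It captures the state that is
// meant to be archived: methods record their final MethodCounters and
// MethodData, and the dynamically grown dependency lists are copied into
// Array<T> form by DepList<T>::prepare(). The Visitor keeps a record from
// being prepared more than once.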
void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  ClassLoaderData* loader_data = nullptr;
  if (_holder != nullptr) {
    loader_data = _holder->class_loader_data();
  } else {
    loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
  }
  _comp_deps.prepare(loader_data);
}

void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  klass()->prepare(visitor);
  if (has_holder()) {
    _final_counters = holder()->method_counters();
    _final_profile = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
  }
  for (int i = 0; i < CompLevel_count; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}

void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  ClassLoaderData* loader_data = _method->klass()->class_loader_data();
  _init_deps.prepare(loader_data);
  _ci_records.prepare(loader_data);
}

KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    }
  }
  TrainingDataLocker l;
  td = training_data_set()->find(&key);
  if (td == nullptr) {
    if (null_if_not_found) {
      return nullptr;
    }
    ktd = KlassTrainingData::allocate(holder);
    if (ktd == nullptr) {
      return nullptr; // allocation failure
    }
    td = training_data_set()->install(ktd);
    assert(ktd == td, "");
  } else {
    ktd = td->as_KlassTrainingData();
    guarantee(ktd->holder() != nullptr, "null holder");
  }
  assert(ktd != nullptr, "");
  guarantee(ktd->holder() == holder, "");
  return ktd;
}

void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated: st->print("[A]"); break;
      case InstanceKlass::loaded: st->print("[D]"); break;
      case InstanceKlass::linked: st->print("[L]"); break;
      case InstanceKlass::being_initialized: st->print("[i]"); break;
      case InstanceKlass::fully_initialized: /*st->print("");*/ break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}

KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  if (holder() == klass) {
    return; // no change to make
  }

  jobject hmj = _holder_mirror;
  if (hmj != nullptr) { // clear out previous handle, if any
    _holder_mirror = nullptr;
    assert(JNIHandles::is_global_handle(hmj), "");
    JNIHandles::destroy_global(hmj);
  }

  // Keep the klass alive during the training run, unconditionally.
  //
  // FIXME: Revisit this decision; we could allow training runs to
  // unload classes in the normal way. We might use make_weak_global
  // instead of make_global.
  //
  // The data from the training run would mention the name of the
  // unloaded class (and of its loader). Is it worth the complexity
  // to track and then unload classes, remembering just their names?

  if (klass != nullptr) {
    Handle hm(JavaThread::current(), klass->java_mirror());
    hmj = JNIHandles::make_global(hm);
    Atomic::release_store(&_holder_mirror, hmj);
  }

  Atomic::release_store(&_holder, const_cast<InstanceKlass*>(klass));
  assert(holder() == klass, "");
}

void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left(this);
  }
  holder()->set_has_init_deps_processed();
}

void TrainingData::init_dumptime_table(TRAPS) {
  if (!need_data()) {
    return;
  }
  _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
  if (CDSConfig::is_dumping_final_static_archive()) {
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  } else {
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate_all([&](const TrainingData::Key* k, TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });

    if (VerifyTrainingData) {
      training_data_set()->verify();
    }
  }

  RecompilationSchedule::prepare(CHECK);
}

#if INCLUDE_CDS
void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (!need_data()) {
    return;
  }
  assert(_dumptime_training_data_dictionary != nullptr, "");
  for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
    _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
  }
  RecompilationSchedule::iterate_roots(it);
}

void TrainingData::dump_training_data() {
  if (!need_data()) {
    return;
  }
  write_training_data_dictionary(&_archived_training_data_dictionary_for_dumping);
}

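// Cleanup pass over the dump-time table: each record drops references to
// classes that are excluded from the archive (KlassTrainingData::cleanup() and
// MethodTrainingData::cleanup() empty the record's key), and entries whose
// keys became empty are then compacted out of the dictionary.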
void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}

void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
    if (is_excluded) {
      ResourceMark rm;
      log_debug(cds)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}

void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) {
      log_debug(cds)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(cds)("Stale MDO for %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}

void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}

void MethodTrainingData::verify() {
  iterate_all_compiles([](CompileTrainingData* ctd) {
    ctd->verify();

    int init_deps_left1 = ctd->init_deps_left();
    int init_deps_left2 = ctd->compute_init_deps_left();

    if (init_deps_left1 != init_deps_left2) {
      ctd->print_on(tty); tty->cr();
    }
    guarantee(init_deps_left1 == init_deps_left2, "mismatch: %d %d %d",
              init_deps_left1, init_deps_left2, ctd->init_deps_left());
  });
}

void CompileTrainingData::verify() {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->is_shared_unregistered_class()) {
      LogStreamHandle(Warning, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }
}

void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}

void TrainingData::serialize_training_data(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
  RecompilationSchedule::serialize_training_data(soc);
}

void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}

void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}

void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}

bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
}

uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}

void TrainingData::write_training_data_dictionary(TrainingDataDictionary* dictionary) {
  if (!need_data()) {
    return;
  }
  assert(_dumptime_training_data_dictionary != nullptr, "");
  CompactHashtableStats stats;
  dictionary->reset();
  CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
  for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
    TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
    for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
      TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
      assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
    }
#endif // ASSERT
    td = ArchiveBuilder::current()->get_buffered_addr(td);
    uint hash = TrainingData::Key::cds_hash(td->key());
    u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
    writer.add(hash, delta);
  }
  writer.dump(dictionary, "training data dictionary");
}

size_t TrainingData::estimate_size_for_archive() {
  if (_dumptime_training_data_dictionary != nullptr) {
    return CompactHashtableWriter::estimate_size(_dumptime_training_data_dictionary->length());
  } else {
    return 0;
  }
}

TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData() && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}
#endif

KlassTrainingData* TrainingData::lookup_for(InstanceKlass* ik) {
#if INCLUDE_CDS
  if (TrainingData::have_data() && ik != nullptr && ik->is_loaded()) {
    TrainingData::Key key(ik);
    TrainingData* td = TrainingData::lookup_archived_training_data(&key);
    if (td != nullptr && td->is_KlassTrainingData()) {
      return td->as_KlassTrainingData();
    }
  }
#endif
  return nullptr;
}

MethodTrainingData* TrainingData::lookup_for(Method* m) {
#if INCLUDE_CDS
  if (TrainingData::have_data() && m != nullptr) {
    KlassTrainingData* holder_ktd = TrainingData::lookup_for(m->method_holder());
    if (holder_ktd != nullptr) {
      TrainingData::Key key(m);
      TrainingData* td = TrainingData::lookup_archived_training_data(&key);
      if (td != nullptr && td->is_MethodTrainingData()) {
        return td->as_MethodTrainingData();
      }
    }
  }
#endif
  return nullptr;
}

template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}

void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(KlassTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}

void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(MethodTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}

void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(CompileTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

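// The allocate() helpers below create new training data records only while
// TrainingDataLocker::can_add() still permits additions; otherwise they return
// nullptr, which callers treat as an allocation failure (see the "Allocation
// failure or snapshot in progress" comment in notice_jit_observation()).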
KlassTrainingData* KlassTrainingData::allocate(InstanceKlass* holder) {
  assert(need_data() || have_data(), "");
  if (TrainingDataLocker::can_add()) {
    return new (mtClassShared) KlassTrainingData(holder);
  }
  return nullptr;
}

MethodTrainingData* MethodTrainingData::allocate(Method* m, KlassTrainingData* ktd) {
  assert(need_data() || have_data(), "");
  if (TrainingDataLocker::can_add()) {
    return new (mtClassShared) MethodTrainingData(m, ktd);
  }
  return nullptr;
}

CompileTrainingData* CompileTrainingData::allocate(MethodTrainingData* mtd, int level, int compile_id) {
  assert(need_data() || have_data(), "");
  if (TrainingDataLocker::can_add()) {
    return new (mtClassShared) CompileTrainingData(mtd, level, compile_id);
  }
  return nullptr;
}

void TrainingDataPrinter::do_value(TrainingData* td) {
#ifdef ASSERT
#if INCLUDE_CDS
  TrainingData::Key key(td->key()->meta());
  assert(td == TrainingData::archived_training_data_dictionary()->lookup(td->key(), TrainingData::Key::cds_hash(td->key()), -1), "");
  assert(td == TrainingData::archived_training_data_dictionary()->lookup(&key, TrainingData::Key::cds_hash(&key), -1), "");
#endif
#endif // ASSERT

  const char* type = (td->is_KlassTrainingData()   ? "K" :
                      td->is_MethodTrainingData()  ? "M" :
                      td->is_CompileTrainingData() ? "C" : "?");
  _st->print("%4d: %p %s ", _index++, td, type);
  td->print_on(_st);
  _st->cr();
  if (td->is_KlassTrainingData()) {
    td->as_KlassTrainingData()->iterate_all_comp_deps([&](CompileTrainingData* ctd) {
      ResourceMark rm;
      _st->print_raw(" C ");
      ctd->print_on(_st);
      _st->cr();
    });
  } else if (td->is_MethodTrainingData()) {
    td->as_MethodTrainingData()->iterate_all_compiles([&](CompileTrainingData* ctd) {
      ResourceMark rm;
      _st->print_raw(" C ");
      ctd->print_on(_st);
      _st->cr();
    });
  } else if (td->is_CompileTrainingData()) {
    // ?
  }
}

#if INCLUDE_CDS
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _holder_mirror = nullptr;
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}

#endif // INCLUDE_CDS