 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "cds/archiveBuilder.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/metaspaceShared.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciMetadata.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "compiler/compileTask.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/method.hpp"
#include "oops/methodCounters.hpp"
#include "oops/recompilationSchedule.hpp"
#include "oops/trainingData.hpp"
#include "runtime/arguments.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "utilities/growableArray.hpp"

TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;

MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

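// One-time setup during VM startup: when either training mode is active,
// initialize the training-data locker; the recompilation schedule is
// initialized unconditionally.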
void TrainingData::initialize() {
  // This is a nop if training modes are not enabled.
  if (have_data() || need_data()) {
    // The data structures we have do not currently support iterative training, so you
    // cannot replay and train at the same time. Going forward we may want to adjust
    // iteration/search to enable that.
    guarantee(have_data() != need_data(), "Iterative training is not supported");
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
}

static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}

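// Check that each archived record whose holder is still loaded can be found
// again by a fresh key lookup in the archived dictionary.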
void TrainingData::verify() {
  if (TrainingData::have_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();

// ...
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}

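// Called when one of this compile's initialization dependencies (ktd) has been
// initialized; atomically decrements the count of outstanding dependencies.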
void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd != nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

  uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);

  if (log.is_enabled()) {
    uint init_deps_left2 = compute_init_deps_left();

// ...
    default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}

KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  assert(klass != nullptr, "");
  // The OopHandle constructor allocates a global handle that keeps the mirror
  // alive. We never need to release it, so we don't keep the handle object.
  OopHandle handle(Universe::vm_global(), klass->java_mirror());
  _holder = klass;
  assert(holder() == klass, "");
}

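// Invoked when the holder klass reaches the fully-initialized state: every
// dependent compile gets its init-deps counter decremented.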
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left(this);
  }
  holder()->set_has_init_deps_processed();
}

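// Populate _dumptime_training_data_dictionary before dumping: under need_data()
// the live training-data set is snapshotted, each record is prepared, and all
// records except CompileTrainingData entries are appended.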
void TrainingData::init_dumptime_table(TRAPS) {
  precond((!assembling_data() && !need_data()) || need_data() != assembling_data());
  if (assembling_data()) {
// ...
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });

    if (AOTVerifyTrainingData) {
      training_data_set()->verify();
    }
  }

  RecompilationSchedule::prepare(CHECK);
}

void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (_dumptime_training_data_dictionary != nullptr) {
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
    }
  }
  RecompilationSchedule::iterate_roots(it);
}

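// Write the collected records into the archive's compact hashtable, then
// compact the dumptime dictionary by discarding entries whose keys were
// emptied during cleanup.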
void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      for (int j = i + 1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
      writer.add(hash, delta);
    }

// ...
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}

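// Drop a klass record whose holder was unloaded or is excluded from the
// archive; emptying the key makes dump_training_data() discard the entry.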
void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
    if (is_excluded) {
      ResourceMark rm;
      log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}

// ...
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }
}

void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}

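// On the write side, serialize the freshly built dumping dictionary; on the
// read side, populate the runtime dictionary from the archive.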
void TrainingData::serialize(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
  RecompilationSchedule::serialize(soc);
}

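// Prints one line per record (tagged K/M/C) and, for klass and method records,
// the compiles attached to them.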
class TrainingDataPrinter : StackObj {
  outputStream* _st;
  int _index;
public:
  TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
  void do_value(TrainingData* td) {
    const char* type = (td->is_KlassTrainingData()   ? "K" :
                        td->is_MethodTrainingData()  ? "M" :
                        td->is_CompileTrainingData() ? "C" : "?");
    _st->print("%4d: %p %s ", _index++, td, type);
    td->print_on(_st);
    _st->cr();
    if (td->is_KlassTrainingData()) {
      td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_MethodTrainingData()) {
      td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_CompileTrainingData()) {
      // ?
    }
  }
};

void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}

void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}

void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}

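// A key can be hashed for CDS only if its metadata pointer is null or lives in
// shared metaspace, because the hash is derived from the shared address.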
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
}

uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}

TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.

// ...

  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

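// Archiving-time preparation: copy the dynamically grown dependency list into
// a fixed-length C-heap Array<T> so it can be written into the archive.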
template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

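// remove_unshareable_info(): strip state that must not be written into the
// archive; each subclass also scrubs its dependency lists and profile payloads.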
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}