< prev index next >

src/hotspot/share/oops/trainingData.cpp

Print this page

 21  * questions.
 22  *
 23  */
 24 
 25 #include "cds/aotCompressedPointers.hpp"
 26 #include "cds/cdsConfig.hpp"
 27 #include "ci/ciEnv.hpp"
 28 #include "ci/ciMetadata.hpp"
 29 #include "classfile/compactHashtable.hpp"
 30 #include "classfile/javaClasses.hpp"
 31 #include "classfile/symbolTable.hpp"
 32 #include "classfile/systemDictionaryShared.hpp"
 33 #include "compiler/compileTask.hpp"
 34 #include "memory/metadataFactory.hpp"
 35 #include "memory/metaspaceClosure.hpp"
 36 #include "memory/resourceArea.hpp"
 37 #include "memory/universe.hpp"
 38 #include "oops/method.hpp"
 39 #include "oops/method.inline.hpp"
 40 #include "oops/methodCounters.hpp"

 41 #include "oops/trainingData.hpp"
 42 #include "runtime/arguments.hpp"
 43 #include "runtime/javaThread.inline.hpp"
 44 #include "runtime/jniHandles.inline.hpp"
 45 #include "utilities/growableArray.hpp"
 46 
 47 TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
 48 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
 49 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
 50 TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
 51 int TrainingData::TrainingDataLocker::_lock_mode;
 52 volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
 53 
// Default constructor. Used only by cppVtables.cpp to materialize archived
// C++ vtables; must not be reached outside CDS dump/replay.
MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 58 
// Default constructor. Used only by cppVtables.cpp to materialize archived
// C++ vtables; must not be reached outside CDS dump/replay.
KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 63 
// Default constructor. Used only by cppVtables.cpp to materialize archived
// C++ vtables. Level and compile id start as -1 sentinels ("not set").
CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 68 
// One-time setup of the training-data subsystem.
// Does nothing unless the VM is replaying (have_data) or recording
// (need_data) training data.
void TrainingData::initialize() {
  // this is a nop if training modes are not enabled
  if (have_data() || need_data()) {
    // Data structures that we have do not currently support iterative training. So you cannot replay
    // and train at the same time. Going forward we may want to adjust iteration/search to enable that.
    guarantee(have_data() != need_data(), "Iterative training is not supported");
    TrainingDataLocker::initialize();
  }
}
 78 
 79 static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
 80   guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
 81   TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
 82   guarantee(td == td1, "");
 83 }
 84 
// Consistency check over both the archived dictionary (replay side) and the
// live training-data set (recording side). Each archived entry with a loaded
// holder must be findable again via its reconstructed key.
void TrainingData::verify() {
  if (TrainingData::have_data() && !TrainingData::assembling_data()) {
    archived_training_data_dictionary()->iterate_all([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        // Only entries whose holder klass is present and loaded can be
        // round-tripped through the dictionary lookup.
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify(/*verify_dep_counter*/true);
      }
    });
  }
  if (TrainingData::need_data()) {
    TrainingDataLocker l;  // live set requires the lock while iterating
    training_data_set()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        // During the training run init deps tracking is not setup yet,
        // don't verify it.
        mtd->verify(/*verify_dep_counter*/false);
      }
    });
  }
}
120 
121 MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
122   MethodTrainingData* mtd = nullptr;
123   if (!have_data() && !need_data()) {
124     return mtd;
125   }

133   //    i.e. null_if_no_found == true, then just return a null.
134   // 3. Cache value is not null.
135   //    Return it, the value of training_data_lookup_failed doesn't matter.
136   MethodCounters* mcs = method->method_counters();
137   if (mcs != nullptr) {
138     mtd = mcs->method_training_data();
139     if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
140       return mtd;
141     }
142     if (null_if_not_found && mtd == nullptr) {
143       assert(mtd == nullptr, "No training data found");
144       return nullptr;
145     }
146   } else if (use_cache) {
147     mcs = Method::build_method_counters(Thread::current(), method());
148   }
149 
150   TrainingData* td = nullptr;
151 
152   Key key(method());
153   if (have_data()) {
154     td = lookup_archived_training_data(&key);
155     if (td != nullptr) {
156       mtd = td->as_MethodTrainingData();
157     } else {
158       mtd = nullptr;
159     }
160     // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
161     method->init_training_data(mtd);
162   }
163 
164   if (need_data()) {
165     TrainingDataLocker l;
166     td = training_data_set()->find(&key);
167     if (td == nullptr) {
168       if (!null_if_not_found) {
169         KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
170         if (ktd == nullptr) {
171           return nullptr; // allocation failure
172         }
173         mtd = MethodTrainingData::allocate(method(), ktd);

293     for (int i = 0, len = _init_deps.length(); i < len; i++) {
294       st->print(" dep:");
295       _init_deps.at(i)->print_on(st, true);
296     }
297   }
298 }
299 
300 void CompileTrainingData::notice_inlined_method(CompileTask* task,
301                                                 const methodHandle& method) {
302   MethodTrainingData* mtd = MethodTrainingData::make(method);
303   if (mtd != nullptr) {
304     mtd->notice_compilation(task->comp_level(), true);
305   }
306 }
307 
308 void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
309   // A JIT is starting to look at class k.
310   // We could follow the queries that it is making, but it is
311   // simpler to assume, conservatively, that the JIT will
312   // eventually depend on the initialization state of k.
313   CompileTask* task = env->task();
314   assert(task != nullptr, "");
315   Method* method = task->method();
316   if (what->is_metadata()) {
317     ciMetadata* md = what->as_metadata();
318     if (md->is_loaded() && md->is_instance_klass()) {
319       ciInstanceKlass* cik = md->as_instance_klass();
320 
321       if (cik->is_initialized()) {
322         InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
323         KlassTrainingData* ktd = KlassTrainingData::make(ik);
324         if (ktd == nullptr) {
325           // Allocation failure or snapshot in progress
326           return;
327         }
328         // This JIT task is (probably) requesting that ik be initialized,
329         // so add him to my _init_deps list.
330         TrainingDataLocker l;
331         if (l.can_add()) {
332           add_init_dep(ktd);
333         }
334       }
335     }
336   }
337 }
338 
// Pre-dump pass: convert this record's C-heap dependency lists into
// archivable arrays. `visitor` guards against revisiting shared records.
void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  _comp_deps.prepare();
}
346 
347 void MethodTrainingData::prepare(Visitor& visitor) {
348   if (visitor.is_visited(this)) {
349     return;
350   }
351   visitor.visit(this);
352   klass()->prepare(visitor);
353   if (has_holder()) {
354     _final_counters = holder()->method_counters();

360   for (int i = 0; i < CompLevel_count - 1; i++) {
361     CompileTrainingData* ctd = _last_toplevel_compiles[i];
362     if (ctd != nullptr) {
363       ctd->prepare(visitor);
364     }
365   }
366 }
367 
// Pre-dump pass: prepare the owning method's record first, then this
// record's dependency and ci-record lists. `visitor` prevents cycles.
void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  _init_deps.prepare();
  _ci_records.prepare();
}
377 
378 KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
379   Key key(holder);
380   TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
381   KlassTrainingData* ktd = nullptr;
382   if (td != nullptr) {
383     ktd = td->as_KlassTrainingData();
384     guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
385     if (ktd->has_holder()) {
386       return ktd;
387     } else {
388       ktd = nullptr;
389     }
390   }
391   if (need_data()) {
392     TrainingDataLocker l;
393     td = training_data_set()->find(&key);
394     if (td == nullptr) {
395       if (null_if_not_found) {
396         return nullptr;
397       }
398       ktd = KlassTrainingData::allocate(holder);
399       if (ktd == nullptr) {
400         return nullptr; // allocation failure

447   OopHandle handle(Universe::vm_global(), klass->java_mirror());
448   _holder = klass;
449   assert(holder() == klass, "");
450 }
451 
// Called once the holder klass reaches the initialized state: release one
// init-dep on every compile record that was waiting on this klass.
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left_release(this);
  }
  holder()->set_has_init_deps_processed();
}
465 
// Build the dump-time list of training records to be written to the archive.
// Populated either by copying the input archive (assembling) or by
// snapshotting the live training-data set (recording) -- never both.
void TrainingData::init_dumptime_table(TRAPS) {
  // At most one of the two population modes may be active.
  precond((!assembling_data() && !need_data()) || need_data() != assembling_data());
  if (assembling_data()) {
    // Carry over every record already present in the input archive.
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate_all([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();  // freeze further additions to the live set
    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      // CompileTrainingData records are deliberately not added directly.
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });
  }
  if (AOTVerifyTrainingData) {
    TrainingData::verify();
  }
}
492 
// Visit every dump-time record as a metaspace root so the archive builder
// can relocate the pointers it holds.
void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (_dumptime_training_data_dictionary != nullptr) {
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
    }
  }
}
500 
501 void TrainingData::dump_training_data() {
502   if (_dumptime_training_data_dictionary != nullptr) {
503     CompactHashtableStats stats;
504     _archived_training_data_dictionary_for_dumping.reset();
505     CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
506     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
507       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
508 #ifdef ASSERT
509       for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
510         TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
511         assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
512       }
513 #endif // ASSERT
514       td = ArchiveBuilder::current()->get_buffered_addr(td);
515       uint hash = TrainingData::Key::cds_hash(td->key());
516       writer.add(hash, AOTCompressedPointers::encode_not_null(td));
517     }
518     writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");

524     ResourceMark rm;
525     Visitor visitor(_dumptime_training_data_dictionary->length());
526     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
527       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
528       td->cleanup(visitor);
529     }
530     // Throw away all elements with empty keys
531     int j = 0;
532     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
533       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
534       if (td->key()->is_empty()) {
535         continue;
536       }
537       if (i != j) { // no need to copy if it's the same
538         _dumptime_training_data_dictionary->at_put(j, td);
539       }
540       j++;
541     }
542     _dumptime_training_data_dictionary->trunc_to(j);
543   }

544 }
545 
546 void KlassTrainingData::cleanup(Visitor& visitor) {
547   if (visitor.is_visited(this)) {
548     return;
549   }
550   visitor.visit(this);
551   if (has_holder()) {
552     bool is_excluded = !holder()->is_loaded();
553     if (CDSConfig::is_at_aot_safepoint()) {
554       // Check for AOT exclusion only at AOT safe point.
555       is_excluded |= SystemDictionaryShared::should_be_excluded(holder());
556     }
557     if (is_excluded) {
558       ResourceMark rm;
559       log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
560       _holder = nullptr;
561       key()->make_empty();
562     }
563   }

637       tty->cr();
638     }
639     guarantee(invariant, "init deps invariant violation: %d >= %d", init_deps_left1, init_deps_left2);
640   }
641 }
642 
// Pre-dump cleanup: delegate to the owning method's record (which decides
// exclusion). `visitor` prevents repeated work on shared records.
void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}
650 
// Serialize the dictionary header: the for-dumping table when writing the
// archive, the regular table when reading it back.
void TrainingData::serialize(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
}
658 
// Closure that pretty-prints each archived TrainingData record with a
// running index and a one-letter type tag (K/M/C), followed by any
// associated compile records.
class TrainingDataPrinter : StackObj {
  outputStream* _st;   // destination stream
  int _index;          // running entry counter
public:
  TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
  void do_value(TrainingData* td) {
    const char* type = (td->is_KlassTrainingData()   ? "K" :
                        td->is_MethodTrainingData()  ? "M" :
                        td->is_CompileTrainingData() ? "C" : "?");
    _st->print("%4d: %p %s ", _index++, td, type);
    td->print_on(_st);
    _st->cr();
    if (td->is_KlassTrainingData()) {
      // Klass records list the compilations that depend on them.
      td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_MethodTrainingData()) {
      // Method records list their recorded compilations.
      td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_CompileTrainingData()) {
      // ?
    }
  }
};
690 
// Debug aid: dump the entire archived dictionary to `st`.
void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();  // ensure the locker is usable during iteration
  _archived_training_data_dictionary.iterate_all(&tdp);
}
697 
// Expose the key's metadata pointer to the relocation closure.
void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}
701 
// Base-class relocation hook: only the key holds a metaspace pointer here.
void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}
705 
// A CDS hash can only be computed for keys whose metadata lives in the AOT
// cache (or is null); other pointers are not stable across runs.
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::in_aot_cache(k->meta());
}
709 
// Hash the key's metadata address for the shared (CDS) dictionary.
uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}
713 
// Look up `k` in the archived dictionary. Returns nullptr if the key cannot
// be hashed, the dictionary is empty, or no entry matches.
TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    // Archived entries are expected to always carry their holder.
    if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}
731 
732 template <typename T>
733 void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
734   iter->push(&_deps);

752   iter->push(&_final_profile);
753   iter->push(&_final_counters);
754 }
755 
// Relocation hook: visit the base key plus this record's dependency lists,
// ci records, and back-pointer to the owning method record.
void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}
763 
// Convert the dynamically-grown dependency list into a fixed metadata array
// suitable for archiving. Idempotent: no-op if already converted or if
// nothing was ever collected.
template <typename T>
void TrainingData::DepList<T>::prepare() {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}
774 
// Strip run-specific state before archiving: base record plus comp deps.
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}
779 
// Strip run-specific state before archiving, including the captured final
// counters and profile if present.
void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}
789 
790 void CompileTrainingData::remove_unshareable_info() {
791   TrainingData::remove_unshareable_info();

 21  * questions.
 22  *
 23  */
 24 
 25 #include "cds/aotCompressedPointers.hpp"
 26 #include "cds/cdsConfig.hpp"
 27 #include "ci/ciEnv.hpp"
 28 #include "ci/ciMetadata.hpp"
 29 #include "classfile/compactHashtable.hpp"
 30 #include "classfile/javaClasses.hpp"
 31 #include "classfile/symbolTable.hpp"
 32 #include "classfile/systemDictionaryShared.hpp"
 33 #include "compiler/compileTask.hpp"
 34 #include "memory/metadataFactory.hpp"
 35 #include "memory/metaspaceClosure.hpp"
 36 #include "memory/resourceArea.hpp"
 37 #include "memory/universe.hpp"
 38 #include "oops/method.hpp"
 39 #include "oops/method.inline.hpp"
 40 #include "oops/methodCounters.hpp"
 41 #include "oops/recompilationSchedule.hpp"
 42 #include "oops/trainingData.hpp"
 43 #include "runtime/arguments.hpp"
 44 #include "runtime/javaThread.inline.hpp"
 45 #include "runtime/jniHandles.inline.hpp"
 46 #include "utilities/growableArray.hpp"
 47 
 48 TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
 49 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
 50 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
 51 TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
 52 int TrainingData::TrainingDataLocker::_lock_mode;
 53 volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
 54 
// Default constructor. Used only by cppVtables.cpp; valid only under CDS.
MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 59 
// Default constructor. Used only by cppVtables.cpp; valid only under CDS.
KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 64 
// Default constructor (cppVtables.cpp only). -1 sentinels mean "not set".
CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 69 
// One-time setup of the training-data subsystem. Unlike the previous
// revision, replay and record may now both be active: archived records are
// installed into the live set so new observations merge with old ones.
void TrainingData::initialize() {
  // this is a nop if training modes are not enabled
  if (have_data() || need_data()) {
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
  if (have_data() && need_data()) {
    TrainingDataLocker l;
    // Seed the live training set with every archived record.
    archived_training_data_dictionary()->iterate_all([&](TrainingData* td) {
      training_data_set()->install(td);
    });
  }
}
 83 
// Check that looking up `k` in the archived dictionary yields exactly `td`.
static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}
 89 
// Consistency check over the archived dictionary (pure-replay mode only,
// since in combined mode archived records live in the training set) and the
// live training-data set.
void TrainingData::verify() {
  if (have_data() && !need_data() && !assembling_data()) {
    archived_training_data_dictionary()->iterate_all([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        // Only entries whose holder is present and loaded can be re-looked-up.
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify(/*verify_dep_counter*/true);
      }
    });
  }
  if (need_data()) {
    TrainingDataLocker l;  // live set requires the lock while iterating
    training_data_set()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        // During the training run init deps tracking is not setup yet,
        // don't verify it.
        mtd->verify(/*verify_dep_counter*/false);
      }
    });
  }
}
125 
126 MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
127   MethodTrainingData* mtd = nullptr;
128   if (!have_data() && !need_data()) {
129     return mtd;
130   }

138   //    i.e. null_if_no_found == true, then just return a null.
139   // 3. Cache value is not null.
140   //    Return it, the value of training_data_lookup_failed doesn't matter.
141   MethodCounters* mcs = method->method_counters();
142   if (mcs != nullptr) {
143     mtd = mcs->method_training_data();
144     if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
145       return mtd;
146     }
147     if (null_if_not_found && mtd == nullptr) {
148       assert(mtd == nullptr, "No training data found");
149       return nullptr;
150     }
151   } else if (use_cache) {
152     mcs = Method::build_method_counters(Thread::current(), method());
153   }
154 
155   TrainingData* td = nullptr;
156 
157   Key key(method());
158   if (have_data() && !need_data()) {
159     td = lookup_archived_training_data(&key);
160     if (td != nullptr) {
161       mtd = td->as_MethodTrainingData();
162     } else {
163       mtd = nullptr;
164     }
165     // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
166     method->init_training_data(mtd);
167   }
168 
169   if (need_data()) {
170     TrainingDataLocker l;
171     td = training_data_set()->find(&key);
172     if (td == nullptr) {
173       if (!null_if_not_found) {
174         KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
175         if (ktd == nullptr) {
176           return nullptr; // allocation failure
177         }
178         mtd = MethodTrainingData::allocate(method(), ktd);

298     for (int i = 0, len = _init_deps.length(); i < len; i++) {
299       st->print(" dep:");
300       _init_deps.at(i)->print_on(st, true);
301     }
302   }
303 }
304 
// Record that `method` was inlined into the compilation described by `task`.
void CompileTrainingData::notice_inlined_method(CompileTask* task,
                                                const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::make(method);
  if (mtd != nullptr) {
    mtd->notice_compilation(task->comp_level(), true);
  }
}
312 
// Called when the JIT inspects `what` during this compilation. If `what`
// resolves (directly, or via an object's klass) to an initialized instance
// klass, record an initialization dependency on it in _init_deps.
// NOTE(review): `env` is not used in this revision — confirm whether the
// parameter is kept for interface stability.
void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  ciMetadata* md = nullptr;
  if (what->is_object()) {
    md = what->as_object()->klass();
  } else if (what->is_metadata()) {
    md = what->as_metadata();
  }
  if (md != nullptr && md->is_loaded() && md->is_instance_klass()) {
    ciInstanceKlass* cik = md->as_instance_klass();
    if (!cik->is_initialized()) {
      return;
    }
    KlassTrainingData* ktd = KlassTrainingData::make(cik->get_instanceKlass());
    if (ktd == nullptr) {
      // Allocation failure or snapshot in progress
      return;
    }
    // This JIT task is (probably) requesting that ik be initialized,
    // so add it to my _init_deps list.
    TrainingDataLocker l;
    if (l.can_add()) {
      add_init_dep(ktd);
    }
  }
}
342 
// Pre-dump pass: convert the comp-deps list into an archivable array.
// `visitor` guards against revisiting shared records.
void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  _comp_deps.prepare();
}
350 
351 void MethodTrainingData::prepare(Visitor& visitor) {
352   if (visitor.is_visited(this)) {
353     return;
354   }
355   visitor.visit(this);
356   klass()->prepare(visitor);
357   if (has_holder()) {
358     _final_counters = holder()->method_counters();

364   for (int i = 0; i < CompLevel_count - 1; i++) {
365     CompileTrainingData* ctd = _last_toplevel_compiles[i];
366     if (ctd != nullptr) {
367       ctd->prepare(visitor);
368     }
369   }
370 }
371 
// Pre-dump pass: prepare the owning method's record first, then this
// record's dependency and ci-record lists. `visitor` prevents cycles.
void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  _init_deps.prepare();
  _ci_records.prepare();
}
381 
382 KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
383   Key key(holder);
384   TrainingData* td = CDS_ONLY((have_data() && !need_data()) ? lookup_archived_training_data(&key) :) nullptr;
385   KlassTrainingData* ktd = nullptr;
386   if (td != nullptr) {
387     ktd = td->as_KlassTrainingData();
388     guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
389     if (ktd->has_holder()) {
390       return ktd;
391     } else {
392       ktd = nullptr;
393     }
394   }
395   if (need_data()) {
396     TrainingDataLocker l;
397     td = training_data_set()->find(&key);
398     if (td == nullptr) {
399       if (null_if_not_found) {
400         return nullptr;
401       }
402       ktd = KlassTrainingData::allocate(holder);
403       if (ktd == nullptr) {
404         return nullptr; // allocation failure

451   OopHandle handle(Universe::vm_global(), klass->java_mirror());
452   _holder = klass;
453   assert(holder() == klass, "");
454 }
455 
// Called once the holder klass reaches the initialized state: release one
// init-dep on every compile record that was waiting on this klass.
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left_release(this);
  }
  holder()->set_has_init_deps_processed();
}
469 
// Build the dump-time list of training records to be written to the archive,
// either by copying the input archive (pure assembling) or by snapshotting
// the live training-data set (recording).
void TrainingData::init_dumptime_table(TRAPS) {
  if (assembling_data() && !need_data()) {
    // Carry over every record already present in the input archive.
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate_all([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();  // freeze further additions to the live set
    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      // CompileTrainingData records are deliberately not added directly.
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });
  }

  RecompilationSchedule::prepare(CHECK);

  if (AOTVerifyTrainingData) {
    TrainingData::verify();
  }
}
497 
// Visit every dump-time record (and the recompilation schedule) as
// metaspace roots so the archive builder can relocate their pointers.
void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (_dumptime_training_data_dictionary != nullptr) {
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
    }
  }
  RecompilationSchedule::iterate_roots(it);
}
506 
507 void TrainingData::dump_training_data() {
508   if (_dumptime_training_data_dictionary != nullptr) {
509     CompactHashtableStats stats;
510     _archived_training_data_dictionary_for_dumping.reset();
511     CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
512     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
513       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
514 #ifdef ASSERT
515       for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
516         TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
517         assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
518       }
519 #endif // ASSERT
520       td = ArchiveBuilder::current()->get_buffered_addr(td);
521       uint hash = TrainingData::Key::cds_hash(td->key());
522       writer.add(hash, AOTCompressedPointers::encode_not_null(td));
523     }
524     writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");

530     ResourceMark rm;
531     Visitor visitor(_dumptime_training_data_dictionary->length());
532     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
533       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
534       td->cleanup(visitor);
535     }
536     // Throw away all elements with empty keys
537     int j = 0;
538     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
539       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
540       if (td->key()->is_empty()) {
541         continue;
542       }
543       if (i != j) { // no need to copy if it's the same
544         _dumptime_training_data_dictionary->at_put(j, td);
545       }
546       j++;
547     }
548     _dumptime_training_data_dictionary->trunc_to(j);
549   }
550   RecompilationSchedule::cleanup();
551 }
552 
553 void KlassTrainingData::cleanup(Visitor& visitor) {
554   if (visitor.is_visited(this)) {
555     return;
556   }
557   visitor.visit(this);
558   if (has_holder()) {
559     bool is_excluded = !holder()->is_loaded();
560     if (CDSConfig::is_at_aot_safepoint()) {
561       // Check for AOT exclusion only at AOT safe point.
562       is_excluded |= SystemDictionaryShared::should_be_excluded(holder());
563     }
564     if (is_excluded) {
565       ResourceMark rm;
566       log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
567       _holder = nullptr;
568       key()->make_empty();
569     }
570   }

644       tty->cr();
645     }
646     guarantee(invariant, "init deps invariant violation: %d >= %d", init_deps_left1, init_deps_left2);
647   }
648 }
649 
650 void CompileTrainingData::cleanup(Visitor& visitor) {
651   if (visitor.is_visited(this)) {
652     return;
653   }
654   visitor.visit(this);
655   method()->cleanup(visitor);
656 }
657 
658 void TrainingData::serialize(SerializeClosure* soc) {
659   if (soc->writing()) {
660     _archived_training_data_dictionary_for_dumping.serialize_header(soc);
661   } else {
662     _archived_training_data_dictionary.serialize_header(soc);
663   }
664   RecompilationSchedule::serialize(soc);
665 }
666 
667 class TrainingDataPrinter : StackObj {
668   outputStream* _st;
669   int _index;
670 public:
671   TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
672   void do_value(TrainingData* td) {
673     const char* type = (td->is_KlassTrainingData()   ? "K" :
674                         td->is_MethodTrainingData()  ? "M" :
675                         td->is_CompileTrainingData() ? "C" : "?");
676     _st->print("%4d: %p %s ", _index++, td, type);
677     td->print_on(_st);
678     _st->cr();
679     if (td->is_KlassTrainingData()) {
680       td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
681         ResourceMark rm;
682         _st->print_raw("  C ");
683         ctd->print_on(_st);
684         _st->cr();
685       });
686     } else if (td->is_MethodTrainingData()) {
687       td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
688         ResourceMark rm;
689         _st->print_raw("  C ");
690         ctd->print_on(_st);
691         _st->cr();
692       });
693     } else if (td->is_CompileTrainingData()) {
694       // ?
695     }
696   }
697 };
698 
699 void TrainingData::print_archived_training_data_on(outputStream* st) {
700   st->print_cr("Archived TrainingData Dictionary");
701   TrainingDataPrinter tdp(st);
702   TrainingDataLocker::initialize();
703   _archived_training_data_dictionary.iterate_all(&tdp);
704   RecompilationSchedule::print_archived_training_data_on(st);
705 }
706 
void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  // _meta is declared with a const pointee; cast that away so the closure
  // can visit (and possibly update) the pointer slot.
  iter->push(const_cast<Metadata**>(&_meta));
}
710 
void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  // The base class contributes only its key; subclasses (e.g.
  // CompileTrainingData below) add their own fields on top of this.
  _key.metaspace_pointers_do(iter);
}
714 
715 bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
716   return k->meta() == nullptr || MetaspaceObj::in_aot_cache(k->meta());
717 }
718 
uint TrainingData::Key::cds_hash(const Key* const& k) {
  // Hash the metadata address with the shared-dictionary hash function;
  // only valid when can_compute_cds_hash(k) holds (see above).
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}
722 
723 TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
724   assert(!need_data(), "Should be used only in read-only mode");
725   // For this to work, all components of the key must be in shared metaspace.
726   if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
727     return nullptr;
728   }
729   uint hash = TrainingData::Key::cds_hash(k);
730   TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
731   if (td != nullptr) {
732     if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
733         (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
734       return td;
735     } else {
736       ShouldNotReachHere();
737     }
738   }
739   return nullptr;
740 }
741 
742 template <typename T>
743 void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
744   iter->push(&_deps);

762   iter->push(&_final_profile);
763   iter->push(&_final_counters);
764 }
765 
void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  // Visit the base-class key first, then this subclass's own metadata
  // references: the two dependency/record lists and the method pointer.
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}
773 
774 template <typename T>
775 void TrainingData::DepList<T>::prepare() {
776   if (_deps == nullptr && _deps_dyn != nullptr) {
777     int len = _deps_dyn->length();
778     _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
779     for (int i = 0; i < len; i++) {
780       _deps->at_put(i, _deps_dyn->at(i)); // copy
781     }
782     _deps_dyn = nullptr;
783   }
784 }
785 
void KlassTrainingData::remove_unshareable_info() {
  // Strip base-class state first, then the compile-dependency list.
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}
790 
791 void MethodTrainingData::remove_unshareable_info() {
792   TrainingData::remove_unshareable_info();
793   if (_final_counters != nullptr) {
794     _final_counters->remove_unshareable_info();
795   }
796   if (_final_profile != nullptr) {
797     _final_profile->remove_unshareable_info();
798   }
799 }
800 
801 void CompileTrainingData::remove_unshareable_info() {
802   TrainingData::remove_unshareable_info();
< prev index next >