1 /*
  2  * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "ci/ciEnv.hpp"
 26 #include "ci/ciMetadata.hpp"
 27 #include "cds/cdsConfig.hpp"
 28 #include "cds/metaspaceShared.hpp"
 29 #include "classfile/classLoaderData.hpp"
 30 #include "classfile/compactHashtable.hpp"
 31 #include "classfile/javaClasses.hpp"
 32 #include "classfile/symbolTable.hpp"
 33 #include "classfile/systemDictionaryShared.hpp"
 34 #include "compiler/compileTask.hpp"
 35 #include "memory/metadataFactory.hpp"
 36 #include "memory/metaspaceClosure.hpp"
 37 #include "memory/resourceArea.hpp"
 38 #include "oops/method.hpp"
 39 #include "oops/methodCounters.hpp"
 40 #include "oops/recompilationSchedule.hpp"
 41 #include "oops/trainingData.hpp"
 42 #include "runtime/arguments.hpp"
 43 #include "runtime/javaThread.inline.hpp"
 44 #include "runtime/jniHandles.inline.hpp"
 45 #include "utilities/growableArray.hpp"
 46 
 47 TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
 48 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
 49 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
 50 TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
 51 int TrainingData::TrainingDataLocker::_lock_mode;
 52 volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
 53 
MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only: default-constructed instances exist solely
  // so CDS can harvest/restore the C++ vtable of archived objects.
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 58 
KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only: default-constructed instances exist solely
  // so CDS can harvest/restore the C++ vtable of archived objects.
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 63 
CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only: default-constructed instances exist solely
  // so CDS can harvest/restore the C++ vtable of archived objects.
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 68 
 69 void TrainingData::initialize() {
 70   // this is a nop if training modes are not enabled
 71   if (have_data() || need_data()) {
 72     // Data structures that we have do not currently support iterative training. So you cannot replay
 73     // and train at the same time. Going forward we may want to adjust iteration/search to enable that.
 74     guarantee(have_data() != need_data(), "Iterative training is not supported");
 75     TrainingDataLocker::initialize();
 76   }
 77   RecompilationSchedule::initialize();
 78 }
 79 
 80 static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
 81   guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
 82   TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
 83   guarantee(td == td1, "");
 84 }
 85 
 86 void TrainingData::verify() {
 87   if (TrainingData::have_data()) {
 88     archived_training_data_dictionary()->iterate([&](TrainingData* td) {
 89       if (td->is_KlassTrainingData()) {
 90         KlassTrainingData* ktd = td->as_KlassTrainingData();
 91         if (ktd->has_holder() && ktd->holder()->is_loaded()) {
 92           Key k(ktd->holder());
 93           verify_archived_entry(td, &k);
 94         }
 95         ktd->verify();
 96       } else if (td->is_MethodTrainingData()) {
 97         MethodTrainingData* mtd = td->as_MethodTrainingData();
 98         if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
 99           Key k(mtd->holder());
100           verify_archived_entry(td, &k);
101         }
102         mtd->verify();
103       } else if (td->is_CompileTrainingData()) {
104         td->as_CompileTrainingData()->verify();
105       }
106     });
107   }
108 }
109 
// Returns the MethodTrainingData for 'method', consulting the per-method
// cache in MethodCounters first, then the archived dictionary (replay mode)
// and/or the dump-time training data set (record mode, creating a record
// unless null_if_not_found). Returns null when training is disabled, when no
// record exists and none should be created, or on allocation failure.
// 'use_cache' controls whether MethodCounters are built to hold the cached
// pointer when they are absent.
MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // Cache value is stored in MethodCounters and the following are the
  // possible states:
  // 1. Cached value is method_training_data_sentinel().
  //    This is an initial state and needs a full lookup.
  // 2. Cached value is null.
  //    Lookup failed the last time, if we don't plan to create a new TD object,
  //    i.e. null_if_not_found == true, then just return a null.
  // 3. Cache value is not null.
  //    Return it, the value of training_data_lookup_failed doesn't matter.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
      return mtd;
    }
    if (null_if_not_found && mtd == nullptr) {
      assert(mtd == nullptr, "No training data found");
      return nullptr;
    }
  } else if (use_cache) {
    // No counters yet; build them so the lookup result can be cached below.
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data()) {
    // Replay mode: consult the archived dictionary.
    td = lookup_archived_training_data(&key);
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else {
      mtd = nullptr;
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  if (need_data()) {
    // Record mode: look in (and possibly extend) the live training data set.
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (!null_if_not_found) {
        KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
        if (ktd == nullptr) {
          return nullptr; // allocation failure
        }
        mtd = MethodTrainingData::allocate(method(), ktd);
        if (mtd == nullptr) {
          return nullptr; // allocation failure
        }
        td = training_data_set()->install(mtd);
        assert(td == mtd, "");
      } else {
        mtd = nullptr;
      }
    } else {
      mtd = td->as_MethodTrainingData();
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  return mtd;
}
180 
181 void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
182   if (has_holder()) {
183     _klass->print_on(st, true);
184     st->print(".");
185     name()->print_symbol_on(st);
186     signature()->print_symbol_on(st);
187   }
188   if (name_only) {
189     return;
190   }
191   if (!has_holder()) {
192     st->print("[SYM]");
193   }
194   if (_level_mask) {
195     st->print(" LM%d", _level_mask);
196   }
197   st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
198 }
199 
// Creates the CompileTrainingData record for a top-level compilation task and
// registers it as the latest compilation of the method at that level.
// Returns null for dynamically generated methods or on allocation failure.
CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    // Track the latest top-level compile per level (levels are 1-based,
    // the array is 0-based).
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        // Newer compilation supersedes the previous one; drop its init deps.
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}
231 
232 
233 void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
234   LogStreamHandle(Trace, training) log;
235   if (log.is_enabled()) {
236     log.print("CTD "); print_on(&log); log.cr();
237     log.print("KTD "); ktd->print_on(&log); log.cr();
238   }
239   assert(ktd!= nullptr && ktd->has_holder(), "");
240   assert(_init_deps.contains(ktd), "");
241   assert(_init_deps_left > 0, "");
242 
243   uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);
244 
245   if (log.is_enabled()) {
246     uint init_deps_left2 = compute_init_deps_left();
247     log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
248     ktd->print_on(&log, true);
249   }
250 }
251 
252 uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
253   int left = 0;
254   for (int i = 0; i < _init_deps.length(); i++) {
255     KlassTrainingData* ktd = _init_deps.at(i);
256     // Ignore symbolic refs and already initialized classes (unless explicitly requested).
257     if (ktd->has_holder()) {
258       InstanceKlass* holder = ktd->holder();
259       if (!ktd->holder()->is_initialized() || count_initialized) {
260         ++left;
261       } else if (holder->defined_by_other_loaders()) {
262         Key k(holder);
263         if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
264           ++left;
265         }
266       }
267     }
268   }
269   return left;
270 }
271 
272 void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
273   _method->print_on(st, true);
274   st->print("#%dL%d", _compile_id, _level);
275   if (name_only) {
276     return;
277   }
278   if (_init_deps.length() > 0) {
279     if (_init_deps_left > 0) {
280       st->print(" udeps=%d", _init_deps_left);
281     }
282     for (int i = 0, len = _init_deps.length(); i < len; i++) {
283       st->print(" dep:");
284       _init_deps.at(i)->print_on(st, true);
285     }
286   }
287 }
288 
289 void CompileTrainingData::notice_inlined_method(CompileTask* task,
290                                                 const methodHandle& method) {
291   MethodTrainingData* mtd = MethodTrainingData::make(method);
292   if (mtd != nullptr) {
293     mtd->notice_compilation(task->comp_level(), true);
294   }
295 }
296 
297 void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
298   // A JIT is starting to look at class k.
299   // We could follow the queries that it is making, but it is
300   // simpler to assume, conservatively, that the JIT will
301   // eventually depend on the initialization state of k.
302   CompileTask* task = env->task();
303   assert(task != nullptr, "");
304   Method* method = task->method();
305   InstanceKlass* compiling_klass = method->method_holder();
306   if (what->is_metadata()) {
307     ciMetadata* md = what->as_metadata();
308     if (md->is_loaded() && md->is_instance_klass()) {
309       ciInstanceKlass* cik = md->as_instance_klass();
310 
311       if (cik->is_initialized()) {
312         InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
313         KlassTrainingData* ktd = KlassTrainingData::make(ik);
314         if (ktd == nullptr) {
315           // Allocation failure or snapshot in progress
316           return;
317         }
318         // This JIT task is (probably) requesting that ik be initialized,
319         // so add him to my _init_deps list.
320         TrainingDataLocker l;
321         add_init_dep(ktd);
322       }
323     }
324   }
325 }
326 
327 void KlassTrainingData::prepare(Visitor& visitor) {
328   if (visitor.is_visited(this)) {
329     return;
330   }
331   visitor.visit(this);
332   ClassLoaderData* loader_data = nullptr;
333   if (_holder != nullptr) {
334     loader_data = _holder->class_loader_data();
335   } else {
336     loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
337   }
338   _comp_deps.prepare(loader_data);
339 }
340 
341 void MethodTrainingData::prepare(Visitor& visitor) {
342   if (visitor.is_visited(this)) {
343     return;
344   }
345   visitor.visit(this);
346   klass()->prepare(visitor);
347   if (has_holder()) {
348     _final_counters = holder()->method_counters();
349     _final_profile  = holder()->method_data();
350     assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
351   }
352   for (int i = 0; i < CompLevel_count - 1; i++) {
353     CompileTrainingData* ctd = _last_toplevel_compiles[i];
354     if (ctd != nullptr) {
355       ctd->prepare(visitor);
356     }
357   }
358 }
359 
360 void CompileTrainingData::prepare(Visitor& visitor) {
361   if (visitor.is_visited(this)) {
362     return;
363   }
364   visitor.visit(this);
365   method()->prepare(visitor);
366   ClassLoaderData* loader_data = _method->klass()->class_loader_data();
367   _init_deps.prepare(loader_data);
368   _ci_records.prepare(loader_data);
369 }
370 
// Returns the KlassTrainingData for 'holder': the archived dictionary is
// consulted first (replay mode), then the live training data set (record
// mode), where a new record is installed unless null_if_not_found is true.
// Returns null if no record is available or allocation failed.
KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      // Archived record has no holder; fall through to record a fresh one.
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}
406 
407 void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
408   if (has_holder()) {
409     name()->print_symbol_on(st);
410     switch (holder()->init_state()) {
411       case InstanceKlass::allocated:            st->print("[A]"); break;
412       case InstanceKlass::loaded:               st->print("[D]"); break;
413       case InstanceKlass::linked:               st->print("[L]"); break;
414       case InstanceKlass::being_initialized:    st->print("[i]"); break;
415       case InstanceKlass::fully_initialized:                      break;
416       case InstanceKlass::initialization_error: st->print("[E]"); break;
417       default: fatal("unknown state: %d", holder()->init_state());
418     }
419     if (holder()->is_interface()) {
420       st->print("I");
421     }
422   } else {
423     st->print("[SYM]");
424   }
425   if (name_only) {
426     return;
427   }
428   if (_comp_deps.length() > 0) {
429     for (int i = 0, len = _comp_deps.length(); i < len; i++) {
430       st->print(" dep:");
431       _comp_deps.at(i)->print_on(st, true);
432     }
433   }
434 }
435 
// Constructs a KlassTrainingData bound to 'klass'. The klass's java mirror is
// pinned with a global JNI handle so the holder cannot be unloaded while this
// record refers to it.
KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  if (holder() == klass) {
    return;   // no change to make
  }

  jobject hmj = _holder_mirror;
  if (hmj != nullptr) {   // clear out previous handle, if any
    _holder_mirror = nullptr;
    assert(JNIHandles::is_global_handle(hmj), "");
    JNIHandles::destroy_global(hmj);
  }

  if (klass != nullptr) {
    // Pin the mirror before publishing the holder below.
    Handle hm(JavaThread::current(), klass->java_mirror());
    hmj = JNIHandles::make_global(hm);
    Atomic::release_store(&_holder_mirror, hmj);
  }

  // Release-store so readers that observe _holder also see the mirror handle.
  Atomic::release_store(&_holder, const_cast<InstanceKlass*>(klass));
  assert(holder() == klass, "");
}
457 
// Called when the holder klass reaches fully_initialized: decrements the
// outstanding init-dep counter of every compile that depends on this klass,
// then marks the holder as processed.
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left(this);
  }
  holder()->set_has_init_deps_processed();
}
471 
// Builds the flat dump-time dictionary that the archive writer consumes.
// Exactly one source is used: the already-archived dictionary when assembling
// the final archive, or the live training data set when recording.
void TrainingData::init_dumptime_table(TRAPS) {
  precond((!assembling_data() && !need_data()) || need_data() != assembling_data());
  if (assembling_data()) {
    // Carry every previously archived record into the new dictionary.
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot(); // freeze further updates to the set

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      // Compile records are reachable from their method/klass records and
      // are not stored in the dictionary directly.
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });

    if (AOTVerifyTrainingData) {
      training_data_set()->verify();
    }
  }

  RecompilationSchedule::prepare(CHECK);
}
501 
502 void TrainingData::iterate_roots(MetaspaceClosure* it) {
503   if (_dumptime_training_data_dictionary != nullptr) {
504     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
505       _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
506     }
507   }
508   RecompilationSchedule::iterate_roots(it);
509 }
510 
// Writes the dump-time dictionary into the compact hashtable stored in the
// archive. Assumes init_dumptime_table() has run.
void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      // Keys must be unique across the whole dictionary.
      for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      // Store the buffered (relocated) record as an offset into the archive.
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
      writer.add(hash, delta);
    }
    writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");
  }
}
532 
533 void TrainingData::cleanup_training_data() {
534   if (_dumptime_training_data_dictionary != nullptr) {
535     ResourceMark rm;
536     Visitor visitor(_dumptime_training_data_dictionary->length());
537     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
538       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
539       td->cleanup(visitor);
540     }
541     // Throw away all elements with empty keys
542     int j = 0;
543     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
544       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
545       if (td->key()->is_empty()) {
546         continue;
547       }
548       if (i != j) { // no need to copy if it's the same
549         _dumptime_training_data_dictionary->at_put(j, td);
550       }
551       j++;
552     }
553     _dumptime_training_data_dictionary->trunc_to(j);
554   }
555   RecompilationSchedule::cleanup();
556 }
557 
558 void KlassTrainingData::cleanup(Visitor& visitor) {
559   if (visitor.is_visited(this)) {
560     return;
561   }
562   visitor.visit(this);
563   if (has_holder()) {
564     bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
565     if (is_excluded) {
566       ResourceMark rm;
567       log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
568       _holder = nullptr;
569       key()->make_empty();
570     }
571   }
572   for (int i = 0; i < _comp_deps.length(); i++) {
573     _comp_deps.at(i)->cleanup(visitor);
574   }
575 }
576 
577 void MethodTrainingData::cleanup(Visitor& visitor) {
578   if (visitor.is_visited(this)) {
579     return;
580   }
581   visitor.visit(this);
582   if (has_holder()) {
583     if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) {
584       log_debug(aot, training)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
585       if (_final_profile != nullptr && _final_profile->method() != _holder) {
586         log_warning(aot, training)("Stale MDO for  %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
587       }
588       _final_profile = nullptr;
589       _final_counters = nullptr;
590       _holder = nullptr;
591       key()->make_empty();
592     }
593   }
594   for (int i = 0; i < CompLevel_count - 1; i++) {
595     CompileTrainingData* ctd = _last_toplevel_compiles[i];
596     if (ctd != nullptr) {
597       ctd->cleanup(visitor);
598     }
599   }
600 }
601 
602 void KlassTrainingData::verify() {
603   for (int i = 0; i < comp_dep_count(); i++) {
604     CompileTrainingData* ctd = comp_dep(i);
605     if (!ctd->_init_deps.contains(this)) {
606       print_on(tty); tty->cr();
607       ctd->print_on(tty); tty->cr();
608     }
609     guarantee(ctd->_init_deps.contains(this), "");
610   }
611 }
612 
613 void MethodTrainingData::verify() {
614   iterate_compiles([](CompileTrainingData* ctd) {
615     ctd->verify();
616 
617     int init_deps_left1 = ctd->init_deps_left();
618     int init_deps_left2 = ctd->compute_init_deps_left();
619 
620     if (init_deps_left1 != init_deps_left2) {
621       ctd->print_on(tty); tty->cr();
622     }
623     guarantee(init_deps_left1 == init_deps_left2, "mismatch: %d %d %d",
624               init_deps_left1, init_deps_left2, ctd->init_deps_left());
625   });
626 }
627 
628 void CompileTrainingData::verify() {
629   for (int i = 0; i < init_dep_count(); i++) {
630     KlassTrainingData* ktd = init_dep(i);
631     if (ktd->has_holder() && ktd->holder()->defined_by_other_loaders()) {
632       LogStreamHandle(Warning, training) log;
633       if (log.is_enabled()) {
634         ResourceMark rm;
635         log.print("CTD "); print_value_on(&log);
636         log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
637       }
638     }
639     if (!ktd->_comp_deps.contains(this)) {
640       print_on(tty); tty->cr();
641       ktd->print_on(tty); tty->cr();
642     }
643     guarantee(ktd->_comp_deps.contains(this), "");
644   }
645 }
646 
// Cleanup for a compile record: nothing to drop locally, just cascade to the
// owning method's record (once per visitor pass).
void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}
654 
655 void TrainingData::serialize(SerializeClosure* soc) {
656   if (soc->writing()) {
657     _archived_training_data_dictionary_for_dumping.serialize_header(soc);
658   } else {
659     _archived_training_data_dictionary.serialize_header(soc);
660   }
661   RecompilationSchedule::serialize(soc);
662 }
663 
664 class TrainingDataPrinter : StackObj {
665   outputStream* _st;
666   int _index;
667 public:
668   TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
669   void do_value(TrainingData* td) {
670     const char* type = (td->is_KlassTrainingData()   ? "K" :
671                         td->is_MethodTrainingData()  ? "M" :
672                         td->is_CompileTrainingData() ? "C" : "?");
673     _st->print("%4d: %p %s ", _index++, td, type);
674     td->print_on(_st);
675     _st->cr();
676     if (td->is_KlassTrainingData()) {
677       td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
678         ResourceMark rm;
679         _st->print_raw("  C ");
680         ctd->print_on(_st);
681         _st->cr();
682       });
683     } else if (td->is_MethodTrainingData()) {
684       td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
685         ResourceMark rm;
686         _st->print_raw("  C ");
687         ctd->print_on(_st);
688         _st->cr();
689       });
690     } else if (td->is_CompileTrainingData()) {
691       // ?
692     }
693   }
694 };
695 
696 void TrainingData::print_archived_training_data_on(outputStream* st) {
697   st->print_cr("Archived TrainingData Dictionary");
698   TrainingDataPrinter tdp(st);
699   TrainingDataLocker::initialize();
700   _archived_training_data_dictionary.iterate(&tdp);
701   RecompilationSchedule::print_archived_training_data_on(st);
702 }
703 
// Visits the metadata pointer embedded in the key, for archive relocation.
void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}
707 
// Base implementation: only the key holds metadata pointers.
void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}
711 
712 bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
713   return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
714 }
715 
// Hashes the key's metadata address the same way the shared dictionary does.
uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}
719 
720 TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
721   // For this to work, all components of the key must be in shared metaspace.
722   if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
723     return nullptr;
724   }
725   uint hash = TrainingData::Key::cds_hash(k);
726   TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
727   if (td != nullptr) {
728     if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
729         (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
730       return td;
731     } else {
732       ShouldNotReachHere();
733     }
734   }
735   return nullptr;
736 }
737 
// Visits the frozen dependency array, for archive relocation.
template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}
742 
// Visits all metadata pointers of a klass record: key, comp deps, holder.
void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}
749 
// Visits all metadata pointers of a method record: key, klass record, holder
// method, per-level last compiles, and the snapshotted profile/counters.
void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}
761 
// Visits all metadata pointers of a compile record: key, init deps,
// ci records, and the owning method record.
void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}
769 
770 template <typename T>
771 void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
772   if (_deps == nullptr && _deps_dyn != nullptr) {
773     int len = _deps_dyn->length();
774     _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
775     for (int i = 0; i < len; i++) {
776       _deps->at_put(i, _deps_dyn->at(i)); // copy
777     }
778   }
779 }
780 
// Strips dump-time-only state before archiving: the JNI mirror handle is not
// valid across runs, and the dep list drops its dynamic (growable) part.
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _holder_mirror = nullptr;
  _comp_deps.remove_unshareable_info();
}
786 
// Strips dump-time-only state from the snapshotted counters and profile
// before they are written into the archive.
void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}
796 
// Strips dump-time-only state and recomputes the init-deps countdown counting
// all deps (count_initialized=true), since initialization state starts over
// in the run that loads the archive.
void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}