/*
 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "cds/archiveBuilder.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/metaspaceShared.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciMetadata.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "compiler/compileTask.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "oops/method.hpp"
#include "oops/methodCounters.hpp"
#include "oops/methodData.hpp"
#include "oops/trainingData.hpp"
#include "runtime/arguments.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "utilities/growableArray.hpp"

TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;

MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

void TrainingData::initialize() {
  // This is a nop if training modes are not enabled.
  if (have_data() || need_data()) {
    // The data structures we have do not currently support iterative training, so it is
    // not possible to replay and train at the same time. Going forward we may want to
    // adjust iteration/search to enable that.
    guarantee(have_data() != need_data(), "Iterative training is not supported");
    TrainingDataLocker::initialize();
  }
}

static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}

void TrainingData::verify() {
  if (TrainingData::have_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify();
      } else if (td->is_CompileTrainingData()) {
        td->as_CompileTrainingData()->verify();
      }
    });
  }
}

MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // The cached value is stored in MethodCounters and the following are the
  // possible states:
  // 1. Cached value is method_training_data_sentinel().
  //    This is the initial state and needs a full lookup.
  // 2. Cached value is null.
  //    The lookup failed the last time; if we don't plan to create a new TD object,
  //    i.e. null_if_not_found == true, then just return null.
  // 3. Cached value is neither null nor the sentinel.
  //    Return it; the value of training_data_lookup_failed doesn't matter.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
      return mtd;
    }
    if (null_if_not_found && mtd == nullptr) {
      assert(mtd == nullptr, "No training data found");
      return nullptr;
    }
  } else if (use_cache) {
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data()) {
    td = lookup_archived_training_data(&key);
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else {
      mtd = nullptr;
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (!null_if_not_found) {
        KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
        if (ktd == nullptr) {
          return nullptr; // allocation failure
        }
        mtd = MethodTrainingData::allocate(method(), ktd);
        if (mtd == nullptr) {
          return nullptr; // allocation failure
        }
        td = training_data_set()->install(mtd);
        assert(td == mtd, "");
      } else {
        mtd = nullptr;
      }
    } else {
      mtd = td->as_MethodTrainingData();
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  return mtd;
}

void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    _klass->print_on(st, true);
    st->print(".");
    name()->print_symbol_on(st);
    signature()->print_symbol_on(st);
  }
  if (name_only) {
    return;
  }
  if (!has_holder()) {
    st->print("[SYM]");
  }
  if (_level_mask) {
    st->print(" LM%d", _level_mask);
  }
  st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
}

CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}

void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd != nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

  uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);

  if (log.is_enabled()) {
    uint init_deps_left2 = compute_init_deps_left();
    log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
    ktd->print_on(&log, true);
  }
}

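// Count the initialization dependencies that are still outstanding. A dependency with a
// holder is counted if the holder is not yet initialized (or unconditionally, when
// count_initialized is true). An initialized holder defined by another loader is still
// counted if its key cannot be hashed into the CDS dictionary. Symbolic entries (no
// holder) are ignored.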
uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->defined_by_other_loaders()) {
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left;
        }
      }
    }
  }
  return left;
}

void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
  _method->print_on(st, true);
  st->print("#%dL%d", _compile_id, _level);
  if (name_only) {
    return;
  }
  if (_init_deps.length() > 0) {
    if (_init_deps_left > 0) {
      st->print(" udeps=%d", _init_deps_left);
    }
    for (int i = 0, len = _init_deps.length(); i < len; i++) {
      st->print(" dep:");
      _init_deps.at(i)->print_on(st, true);
    }
  }
}

void CompileTrainingData::notice_inlined_method(CompileTask* task,
                                                const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::make(method);
  if (mtd != nullptr) {
    mtd->notice_compilation(task->comp_level(), true);
  }
}

void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // The JIT is starting to look at the class referenced by 'what'.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of that class.
  CompileTask* task = env->task();
  assert(task != nullptr, "");
  Method* method = task->method();
  InstanceKlass* compiling_klass = method->method_holder();
  if (what->is_metadata()) {
    ciMetadata* md = what->as_metadata();
    if (md->is_loaded() && md->is_instance_klass()) {
      ciInstanceKlass* cik = md->as_instance_klass();

      if (cik->is_initialized()) {
        InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
        KlassTrainingData* ktd = KlassTrainingData::make(ik);
        if (ktd == nullptr) {
          // Allocation failure or snapshot in progress
          return;
        }
        // This JIT task is (probably) requesting that ik be initialized,
        // so add it to this compilation's _init_deps list.
        TrainingDataLocker l;
        add_init_dep(ktd);
      }
    }
  }
}

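// Prepare this entry for archiving: snapshot the dynamically grown dependency list into
// an archivable array, using the holder's class loader data (or the system loader's CLD
// for symbolic entries that have no holder).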
void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  ClassLoaderData* loader_data = nullptr;
  if (_holder != nullptr) {
    loader_data = _holder->class_loader_data();
  } else {
    loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
  }
  _comp_deps.prepare(loader_data);
}

void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  klass()->prepare(visitor);
  if (has_holder()) {
    _final_counters = holder()->method_counters();
    _final_profile  = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}

void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  ClassLoaderData* loader_data = _method->klass()->class_loader_data();
  _init_deps.prepare(loader_data);
  _ci_records.prepare(loader_data);
}

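// Find (or, unless null_if_not_found is set, create) the KlassTrainingData for 'holder'.
// The archived dictionary is consulted first when replay data is available; otherwise,
// in training mode, the dumptime training data set is searched under the
// TrainingDataLocker and a new entry is allocated and installed if none exists yet.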
KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}

void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated:            st->print("[A]"); break;
      case InstanceKlass::loaded:               st->print("[D]"); break;
      case InstanceKlass::linked:               st->print("[L]"); break;
      case InstanceKlass::being_initialized:    st->print("[i]"); break;
      case InstanceKlass::fully_initialized:                      break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}

KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  if (holder() == klass) {
    return;   // no change to make
  }

  jobject hmj = _holder_mirror;
  if (hmj != nullptr) {   // clear out previous handle, if any
    _holder_mirror = nullptr;
    assert(JNIHandles::is_global_handle(hmj), "");
    JNIHandles::destroy_global(hmj);
  }

  if (klass != nullptr) {
    Handle hm(JavaThread::current(), klass->java_mirror());
    hmj = JNIHandles::make_global(hm);
    Atomic::release_store(&_holder_mirror, hmj);
  }

  Atomic::release_store(&_holder, const_cast<InstanceKlass*>(klass));
  assert(holder() == klass, "");
}

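// Called once the holder class is fully initialized: every compilation that recorded
// this class as an initialization dependency has its outstanding-dependency counter
// decremented, and the holder is marked as having had its init deps processed.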
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock when we are not collecting data;
                        // that is why the decrement done below has to be atomic.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left(this);
  }
  holder()->set_has_init_deps_processed();
}

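// Build the dumptime dictionary. When assembling_data() is true, the entries already
// present in the archived dictionary are carried over verbatim. When need_data() is true
// (training run), the live training data set is snapshotted under the lock, each entry is
// prepared for archiving, and every entry except CompileTrainingData records is appended.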
void TrainingData::init_dumptime_table(TRAPS) {
  precond((!assembling_data() && !need_data()) || need_data() != assembling_data());
  if (assembling_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });

    if (AOTVerifyTrainingData) {
      training_data_set()->verify();
    }
  }
}

void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (_dumptime_training_data_dictionary != nullptr) {
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
    }
  }
}

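// Write the collected training data into the dumping dictionary: each entry is translated
// to its buffered address and added to a CompactHashtable keyed by the CDS hash of its key;
// the debug-only inner loop asserts that no two entries share a key.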
void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
      writer.add(hash, delta);
    }
    writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");
  }
}

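// Two passes over the dumptime dictionary: first give every entry a chance to drop
// references to excluded classes and methods (which empties the entry's key), then
// compact the dictionary in place by discarding entries whose keys became empty.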
void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
}

void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
    if (is_excluded) {
      ResourceMark rm;
      log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}

void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) {
      log_debug(aot, training)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(aot, training)("Stale MDO for %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}

void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}

void MethodTrainingData::verify() {
  iterate_compiles([](CompileTrainingData* ctd) {
    ctd->verify();

    int init_deps_left1 = ctd->init_deps_left();
    int init_deps_left2 = ctd->compute_init_deps_left();

    if (init_deps_left1 != init_deps_left2) {
      ctd->print_on(tty); tty->cr();
    }
    guarantee(init_deps_left1 == init_deps_left2, "mismatch: %d %d %d",
              init_deps_left1, init_deps_left2, ctd->init_deps_left());
  });
}

void CompileTrainingData::verify() {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->defined_by_other_loaders()) {
      LogStreamHandle(Warning, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }
}

void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}

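// During archive dumping the header of the freshly built dumping dictionary is written
// out; at runtime the header of the shared dictionary is restored from the archive.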
void TrainingData::serialize(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
}

class TrainingDataPrinter : StackObj {
  outputStream* _st;
  int _index;
public:
  TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
  void do_value(TrainingData* td) {
    const char* type = (td->is_KlassTrainingData()   ? "K" :
                        td->is_MethodTrainingData()  ? "M" :
                        td->is_CompileTrainingData() ? "C" : "?");
    _st->print("%4d: %p %s ", _index++, td, type);
    td->print_on(_st);
    _st->cr();
    if (td->is_KlassTrainingData()) {
      td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_MethodTrainingData()) {
      td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_CompileTrainingData()) {
      // ?
    }
  }
};

void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
}

void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}

void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}

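// A key can be hashed for the compact (CDS) dictionary only if its metadata is either
// null or already located in shared metaspace.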
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
}

uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}

TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}

template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}

void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}

void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}

void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

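// Snapshot the dynamically grown C-heap dependency list (_deps_dyn) into a fixed-length
// Array<T> (_deps) so the dependencies can be archived; an existing snapshot is left alone.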
template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _holder_mirror = nullptr;
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}