/*
 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "cds/cdsConfig.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciMetadata.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "compiler/compileTask.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/method.hpp"
#include "oops/method.inline.hpp"
#include "oops/methodCounters.hpp"
#include "oops/recompilationSchedule.hpp"
#include "oops/trainingData.hpp"
#include "runtime/arguments.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "utilities/growableArray.hpp"

TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;

MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

void TrainingData::initialize() {
  // This is a nop if training modes are not enabled.
  if (have_data() || need_data()) {
    // The data structures we have do not currently support iterative training, so replaying
    // and training cannot happen at the same time. Going forward we may want to adjust
    // iteration/search to enable that.
    guarantee(have_data() != need_data(), "Iterative training is not supported");
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
}

static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}

void TrainingData::verify() {
  if (TrainingData::have_data() && !TrainingData::assembling_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify(/*verify_dep_counter*/true);
      }
    });
  }
  if (TrainingData::need_data()) {
    TrainingDataLocker l;
    training_data_set()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        // During the training run init deps tracking is not set up yet,
        // so don't verify it.
        mtd->verify(/*verify_dep_counter*/false);
      }
    });
  }
}

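// Find (or, when training, create) the MethodTrainingData record for 'method'.
// The record is looked up first through the per-method cache in MethodCounters,
// then in the archived dictionary (replay) and/or the live training data set (training).
// Returns null if nothing is found and null_if_not_found is true, or on allocation failure.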
MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // The cached value is stored in MethodCounters and the following are the
  // possible states:
  // 1. Cached value is method_training_data_sentinel().
  //    This is the initial state and needs a full lookup.
  // 2. Cached value is null.
  //    Lookup failed the last time; if we don't plan to create a new TD object,
  //    i.e. null_if_not_found == true, then just return null.
  // 3. Cached value is not null.
  //    Return it; the value of training_data_lookup_failed doesn't matter.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
      return mtd;
    }
    if (null_if_not_found && mtd == nullptr) {
      assert(mtd == nullptr, "No training data found");
      return nullptr;
    }
  } else if (use_cache) {
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data()) {
    td = lookup_archived_training_data(&key);
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else {
      mtd = nullptr;
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (!null_if_not_found) {
        KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
        if (ktd == nullptr) {
          return nullptr; // allocation failure
        }
        mtd = MethodTrainingData::allocate(method(), ktd);
        if (mtd == nullptr) {
          return nullptr; // allocation failure
        }
        td = training_data_set()->install(mtd);
        assert(td == mtd, "");
      } else {
        mtd = nullptr;
      }
    } else {
      mtd = td->as_MethodTrainingData();
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  return mtd;
}

void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    _klass->print_on(st, true);
    st->print(".");
    name()->print_symbol_on(st);
    signature()->print_symbol_on(st);
  }
  if (name_only) {
    return;
  }
  if (!has_holder()) {
    st->print("[SYM]");
  }
  if (_level_mask) {
    st->print(" LM%d", _level_mask);
  }
  st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
}

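// Record a top-level compilation of the task's method at the given level. The
// MethodTrainingData remembers only the most recent top-level compile per level:
// when a newer compile id arrives, the init dependencies of the previous record
// for that level are cleared and the slot is replaced.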
CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}

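// Called from KlassTrainingData::notice_fully_initialized() for every compile that
// depends on the klass: atomically decrements the count of init dependencies of this
// compile that are still pending.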
void CompileTrainingData::dec_init_deps_left_release(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd != nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

  uint init_deps_left1 = AtomicAccess::sub(&_init_deps_left, 1);

  if (log.is_enabled()) {
    uint init_deps_left2 = compute_init_deps_left();
    log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
    ktd->print_on(&log, true);
  }
}

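// Recount how many init dependencies are still outstanding: those whose holder is not
// yet initialized (or all of them, if count_initialized is true), plus initialized
// classes defined by other (unregistered) loaders whose key cannot be hashed for the
// CDS dictionary.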
uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->defined_by_other_loaders()) {
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left;
        }
      }
    }
  }
  return left;
}

void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
  _method->print_on(st, true);
  st->print("#%dL%d", _compile_id, _level);
  if (name_only) {
    return;
  }
  if (_init_deps.length() > 0) {
    if (_init_deps_left > 0) {
      st->print(" udeps=%d", _init_deps_left);
    }
    for (int i = 0, len = _init_deps.length(); i < len; i++) {
      st->print(" dep:");
      _init_deps.at(i)->print_on(st, true);
    }
  }
}

void CompileTrainingData::notice_inlined_method(CompileTask* task,
                                                const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::make(method);
  if (mtd != nullptr) {
    mtd->notice_compilation(task->comp_level(), true);
  }
}

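// Record, as an init dependency of this compile, every loaded and already initialized
// instance klass that the JIT observes (either directly as metadata or as the class of
// a constant object).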
void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  ciMetadata* md = nullptr;
  if (what->is_object()) {
    md = what->as_object()->klass();
  } else if (what->is_metadata()) {
    md = what->as_metadata();
  }
  if (md != nullptr && md->is_loaded() && md->is_instance_klass()) {
    ciInstanceKlass* cik = md->as_instance_klass();
    if (!cik->is_initialized()) {
      return;
    }
    KlassTrainingData* ktd = KlassTrainingData::make(cik->get_instanceKlass());
    if (ktd == nullptr) {
      // Allocation failure or snapshot in progress
      return;
    }
    // This JIT task is (probably) requesting that this klass be initialized,
    // so add it to my _init_deps list.
    TrainingDataLocker l;
    if (l.can_add()) {
      add_init_dep(ktd);
    }
  }
}

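// prepare() is called on each record reachable from the dumptime table before archiving
// (see init_dumptime_table): it snapshots the dynamic dependency lists into archivable
// arrays and captures the final counters/profile of the training run.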
void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  _comp_deps.prepare();
}

void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  klass()->prepare(visitor);
  if (has_holder()) {
    _final_counters = holder()->method_counters();
    _final_profile  = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
    _invocation_count = holder()->invocation_count();
    _backedge_count = holder()->backedge_count();
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}

void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  _init_deps.prepare();
  _ci_records.prepare();
}

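// Find (or, when training, create) the KlassTrainingData record for 'holder'.
// Replay data is consulted first; an archived record is only reused if it still has
// its holder. When training, a missing record is allocated and installed unless
// null_if_not_found is true.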
KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}

void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated:            st->print("[A]"); break;
      case InstanceKlass::loaded:               st->print("[D]"); break;
      case InstanceKlass::linked:               st->print("[L]"); break;
      case InstanceKlass::being_initialized:    st->print("[i]"); break;
      case InstanceKlass::fully_initialized:                      break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}

KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  assert(klass != nullptr, "");
  // The OopHandle constructor will allocate a handle. We never need to release it,
  // so we don't keep the handle object around.
  OopHandle handle(Universe::vm_global(), klass->java_mirror());
  _holder = klass;
  assert(holder() == klass, "");
}

void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left_release(this);
  }
  holder()->set_has_init_deps_processed();
}

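// Build the flat list of records that will be written into the archive. In the assembly
// phase we copy the records already present in the archived dictionary; in a training run
// we snapshot the live training data set (CompileTrainingData records are not appended
// directly, they stay reachable through their MethodTrainingData).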
void TrainingData::init_dumptime_table(TRAPS) {
  precond((!assembling_data() && !need_data()) || need_data() != assembling_data());
  if (assembling_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();
    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });
  }

  RecompilationSchedule::prepare(CHECK);

  if (AOTVerifyTrainingData) {
    TrainingData::verify();
  }
}

void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (_dumptime_training_data_dictionary != nullptr) {
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
    }
  }
  RecompilationSchedule::iterate_roots(it);
}

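// Write the dumptime list into the compact hashtable that becomes the archived
// dictionary: each record is keyed by the CDS hash of its Key and stored as an
// offset into the archive buffer.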
void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
      writer.add(hash, delta);
    }
    writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");
  }
}

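// Drop records that must not be archived: cleanup() detaches holders of classes and
// methods that are unloaded or excluded from the archive (emptying their keys), and a
// second pass compacts the dumptime list by removing all entries with empty keys.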
void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}

void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded();
    if (CDSConfig::is_at_aot_safepoint()) {
      // Check for AOT exclusion only at AOT safe point.
      is_excluded |= SystemDictionaryShared::should_be_excluded(holder());
    }
    if (is_excluded) {
      ResourceMark rm;
      log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}

void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (CDSConfig::is_at_aot_safepoint() && SystemDictionaryShared::should_be_excluded(holder()->method_holder())) {
      // Check for AOT exclusion only at AOT safe point.
      log_debug(aot, training)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(aot, training)("Stale MDO for %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}

void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}

void MethodTrainingData::verify(bool verify_dep_counter) {
  iterate_compiles([&](CompileTrainingData* ctd) {
    ctd->verify(verify_dep_counter);
  });
}

void CompileTrainingData::verify(bool verify_dep_counter) {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->defined_by_other_loaders()) {
      LogStreamHandle(Info, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }

  if (verify_dep_counter) {
    int init_deps_left1 = init_deps_left_acquire();
    int init_deps_left2 = compute_init_deps_left();

    bool invariant = (init_deps_left1 >= init_deps_left2);
    if (!invariant) {
      print_on(tty);
      tty->cr();
    }
    guarantee(invariant, "init deps invariant violation: %d >= %d", init_deps_left1, init_deps_left2);
  }
}

void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}

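// Archive (de)serialization of the dictionary header: when writing we emit the freshly
// built dumping dictionary, when reading we bind the runtime archived dictionary.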
void TrainingData::serialize(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
  RecompilationSchedule::serialize(soc);
}

class TrainingDataPrinter : StackObj {
  outputStream* _st;
  int _index;
public:
  TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
  void do_value(TrainingData* td) {
    const char* type = (td->is_KlassTrainingData()   ? "K" :
                        td->is_MethodTrainingData()  ? "M" :
                        td->is_CompileTrainingData() ? "C" : "?");
    _st->print("%4d: %p %s ", _index++, td, type);
    td->print_on(_st);
    _st->cr();
    if (td->is_KlassTrainingData()) {
      td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_MethodTrainingData()) {
      td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_CompileTrainingData()) {
      // ?
    }
  }
};

void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}

void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}

void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}

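// Keys are hashed by the address of their metadata, which is only meaningful across runs
// for metadata already in the AOT cache; can_compute_cds_hash() guards cds_hash() accordingly.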
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::in_aot_cache(k->meta());
}

uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}

TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}

template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}

void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}

void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}

void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

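// Snapshot the C-heap growable dependency list into a MetadataFactory-allocated array
// so that it can be archived; a no-op if the list was never populated or was already
// snapshotted.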
template <typename T>
void TrainingData::DepList<T>::prepare() {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

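// At dump time every init dependency is counted as outstanding (count_initialized == true),
// since initialization state from the training run does not carry over into the run that
// replays this data.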
void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}