1 /*
  2  * Copyright (c) 2023, 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "ci/ciEnv.hpp"
 26 #include "ci/ciMetadata.hpp"
 27 #include "cds/cdsConfig.hpp"
 28 #include "cds/metaspaceShared.hpp"
 29 #include "classfile/classLoaderData.hpp"
 30 #include "classfile/compactHashtable.hpp"
 31 #include "classfile/javaClasses.hpp"
 32 #include "classfile/symbolTable.hpp"
 33 #include "classfile/systemDictionaryShared.hpp"
 34 #include "compiler/compileTask.hpp"
 35 #include "memory/metadataFactory.hpp"
 36 #include "memory/metaspaceClosure.hpp"
 37 #include "memory/resourceArea.hpp"
 38 #include "oops/method.hpp"
 39 #include "oops/methodCounters.hpp"
 40 #include "oops/recompilationSchedule.hpp"
 41 #include "oops/trainingData.hpp"
 42 #include "runtime/arguments.hpp"
 43 #include "runtime/javaThread.inline.hpp"
 44 #include "runtime/jniHandles.inline.hpp"
 45 #include "utilities/growableArray.hpp"
 46 
// Process-wide registry of training data recorded during a training run.
// The arguments are table sizing parameters -- TODO confirm against the
// TrainingDataSet declaration (presumably initial and maximum size).
TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
// Read-only dictionary of training data mapped in from the CDS archive.
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
// Staging dictionary populated at dump time and written into the archive.
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
// Dump-time list of records to archive; allocated in init_dumptime_table().
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
// Set once the dump-time snapshot has been taken (see TrainingDataLocker::snapshot()).
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
 53 
// Default constructor; only reachable on CDS paths (dumping or using a shared archive).
MethodTrainingData::MethodTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 57 
// Default constructor; only reachable on CDS paths (dumping or using a shared archive).
KlassTrainingData::KlassTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 61 
// Default constructor; only reachable on CDS paths. Level and compile id are
// initialized to -1 sentinels (no compilation recorded yet).
CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 65 
// One-time setup of the training-data subsystem. Replay (have_data) and
// record (need_data) modes are mutually exclusive.
void TrainingData::initialize() {
  // this is a nop if training modes are not enabled
  if (have_data() || need_data()) {
    // Data structures that we have do not currently support iterative training. So you cannot replay
    // and train at the same time. Going forward we may want to adjust iteration/search to enable that.
    guarantee(have_data() != need_data(), "Iterative training is not supported");
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
}
 76 
 77 #if INCLUDE_CDS
// Verifies that an archived entry discovered by iterating the dictionary is
// also reachable via a direct key lookup (i.e. the key hashes consistently).
static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}
 83 #endif
 84 
// Sanity-checks the archived training-data dictionary: records with a loaded
// holder must be findable by key, and every record's internal invariants must hold.
void TrainingData::verify() {
#if INCLUDE_CDS
  if (TrainingData::have_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        // Key lookup is only possible when the holder is loaded (the CDS hash
        // is computed from shared metadata pointers).
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify();
      } else if (td->is_CompileTrainingData()) {
        td->as_CompileTrainingData()->verify();
      }
    });
  }
#endif
}
110 
111 MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
112   MethodTrainingData* mtd = nullptr;
113   if (!have_data() && !need_data()) {
114     return mtd;
115   }
116   // Try grabbing the cached value first.
117   // Cache value is stored in MethodCounters and the following are the
118   // possible states:
119   // 1. Cached value is method_training_data_sentinel().
120   //    This is an initial state and needs a full lookup.
121   // 2. Cached value is null.
122   //    Lookup failed the last time, if we don't plan to create a new TD object,
123   //    i.e. null_if_no_found == true, then just return a null.
124   // 3. Cache value is not null.
125   //    Return it, the value of training_data_lookup_failed doesn't matter.
126   MethodCounters* mcs = method->method_counters();
127   if (mcs != nullptr) {
128     mtd = mcs->method_training_data();
129     if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
130       return mtd;
131     }
132     if (null_if_not_found && mtd == nullptr) {
133       assert(mtd == nullptr, "No training data found");
134       return nullptr;
135     }
136   } else if (use_cache) {
137     mcs = Method::build_method_counters(Thread::current(), method());
138   }
139 
140   TrainingData* td = nullptr;
141 
142   Key key(method());
143   if (have_data()) {
144 #if INCLUDE_CDS
145     td = lookup_archived_training_data(&key);
146 #endif
147     if (td != nullptr) {
148       mtd = td->as_MethodTrainingData();
149     } else {
150       mtd = nullptr;
151     }
152     // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
153     method->init_training_data(mtd);
154   }
155 
156   if (need_data()) {
157     TrainingDataLocker l;
158     td = training_data_set()->find(&key);
159     if (td == nullptr) {
160       if (!null_if_not_found) {
161         KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
162         if (ktd == nullptr) {
163           return nullptr; // allocation failure
164         }
165         mtd = MethodTrainingData::allocate(method(), ktd);
166         if (mtd == nullptr) {
167           return nullptr; // allocation failure
168         }
169         td = training_data_set()->install(mtd);
170         assert(td == mtd, "");
171       } else {
172         mtd = nullptr;
173       }
174     } else {
175       mtd = td->as_MethodTrainingData();
176     }
177     // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
178     method->init_training_data(mtd);
179   }
180 
181   return mtd;
182 }
183 
184 void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
185   if (has_holder()) {
186     _klass->print_on(st, true);
187     st->print(".");
188     name()->print_symbol_on(st);
189     signature()->print_symbol_on(st);
190   }
191   if (name_only) {
192     return;
193   }
194   if (!has_holder()) {
195     st->print("[SYM]");
196   }
197   if (_level_mask) {
198     st->print(" LM%d", _level_mask);
199   }
200   st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
201 }
202 
203 CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
204   int level = task->comp_level();
205   int compile_id = task->compile_id();
206   Thread* thread = Thread::current();
207   methodHandle m(thread, task->method());
208   MethodTrainingData* mtd = MethodTrainingData::make(m);
209   if (mtd == nullptr) {
210     return nullptr; // allocation failure
211   }
212   mtd->notice_compilation(level);
213 
214   TrainingDataLocker l;
215   CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
216   if (ctd != nullptr) {
217     if (mtd->_last_toplevel_compiles[level - 1] != nullptr) {
218       if (mtd->_last_toplevel_compiles[level - 1]->compile_id() < compile_id) {
219         mtd->_last_toplevel_compiles[level - 1]->clear_init_deps();
220         mtd->_last_toplevel_compiles[level - 1] = ctd;
221         mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level);
222       }
223     } else {
224       mtd->_last_toplevel_compiles[level - 1] = ctd;
225       mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level);
226     }
227   }
228   return ctd;
229 }
230 
231 
232 void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
233   LogStreamHandle(Trace, training) log;
234   if (log.is_enabled()) {
235     log.print("CTD "); print_on(&log); log.cr();
236     log.print("KTD "); ktd->print_on(&log); log.cr();
237   }
238   assert(ktd!= nullptr && ktd->has_holder(), "");
239   assert(_init_deps.contains(ktd), "");
240   assert(_init_deps_left > 0, "");
241 
242   uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);
243 
244   if (log.is_enabled()) {
245     uint init_deps_left2 = compute_init_deps_left();
246     log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
247     ktd->print_on(&log, true);
248   }
249 }
250 
// Recounts the initialization dependencies that are still outstanding, i.e.
// klasses this compilation waits on that are not yet initialized. With
// count_initialized, all deps with a live holder are counted regardless of
// init state. Initialized-but-unregistered shared classes whose key has no
// computable CDS hash also count, since they can never be resolved against
// the archive at run time.
uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->is_shared_unregistered_class()) {
        // Without CDS no hash can ever be computed, so the dep always counts.
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left;
        }
      }
    }
  }
  return left;
}
270 
271 void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
272   _method->print_on(st, true);
273   st->print("#%dL%d", _compile_id, _level);
274   if (name_only) {
275     return;
276   }
277   if (_init_deps.length() > 0) {
278     if (_init_deps_left > 0) {
279       st->print(" udeps=%d", _init_deps_left);
280     }
281     for (int i = 0, len = _init_deps.length(); i < len; i++) {
282       st->print(" dep:");
283       _init_deps.at(i)->print_on(st, true);
284     }
285   }
286 }
287 
288 void CompileTrainingData::notice_inlined_method(CompileTask* task,
289                                                 const methodHandle& method) {
290   MethodTrainingData* mtd = MethodTrainingData::make(method);
291   if (mtd != nullptr) {
292     mtd->notice_compilation(task->comp_level(), true);
293   }
294 }
295 
296 void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
297   // A JIT is starting to look at class k.
298   // We could follow the queries that it is making, but it is
299   // simpler to assume, conservatively, that the JIT will
300   // eventually depend on the initialization state of k.
301   CompileTask* task = env->task();
302   assert(task != nullptr, "");
303   Method* method = task->method();
304   InstanceKlass* compiling_klass = method->method_holder();
305   if (what->is_metadata()) {
306     ciMetadata* md = what->as_metadata();
307     if (md->is_loaded() && md->is_instance_klass()) {
308       ciInstanceKlass* cik = md->as_instance_klass();
309 
310       if (cik->is_initialized()) {
311         InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
312         KlassTrainingData* ktd = KlassTrainingData::make(ik);
313         if (ktd == nullptr) {
314           // Allocation failure or snapshot in progress
315           return;
316         }
317         // This JIT task is (probably) requesting that ik be initialized,
318         // so add him to my _init_deps list.
319         TrainingDataLocker l;
320         add_init_dep(ktd);
321       }
322     }
323   }
324 }
325 
// Prepares this record for archiving: converts the dynamic comp-deps list
// into a metaspace-allocated array, attributed to the holder's loader (or the
// system loader's CLD when there is no holder).
void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  ClassLoaderData* loader_data = nullptr;
  if (_holder != nullptr) {
    loader_data = _holder->class_loader_data();
  } else {
    loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
  }
  _comp_deps.prepare(loader_data);
}
339 
// Prepares this record for archiving: snapshots the method's final counters
// and profile data, and recursively prepares the holder klass record plus the
// last top-level compilation record at each level.
void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  klass()->prepare(visitor);
  if (has_holder()) {
    _final_counters = holder()->method_counters();
    _final_profile  = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
  }
  for (int i = 0; i < CompLevel_count; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}
358 
// Prepares this compilation record for archiving: prepares the owning method
// record and converts both dependency lists into archivable arrays.
void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  ClassLoaderData* loader_data = _method->klass()->class_loader_data();
  _init_deps.prepare(loader_data);
  _ci_records.prepare(loader_data);
}
369 
// Finds (or, unless null_if_not_found, creates) the KlassTrainingData for a
// klass. An archived record is returned only if it still has a live holder;
// otherwise the live (dump-time) set is consulted and possibly extended.
KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      // Archived record has no holder (cleared at dump time); ignore it.
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}
405 
406 void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
407   if (has_holder()) {
408     name()->print_symbol_on(st);
409     switch (holder()->init_state()) {
410       case InstanceKlass::allocated:            st->print("[A]"); break;
411       case InstanceKlass::loaded:               st->print("[D]"); break;
412       case InstanceKlass::linked:               st->print("[L]"); break;
413       case InstanceKlass::being_initialized:    st->print("[i]"); break;
414       case InstanceKlass::fully_initialized:                      break;
415       case InstanceKlass::initialization_error: st->print("[E]"); break;
416       default: fatal("unknown state: %d", holder()->init_state());
417     }
418     if (holder()->is_interface()) {
419       st->print("I");
420     }
421   } else {
422     st->print("[SYM]");
423   }
424   if (name_only) {
425     return;
426   }
427   if (_comp_deps.length() > 0) {
428     for (int i = 0, len = _comp_deps.length(); i < len; i++) {
429       st->print(" dep:");
430       _comp_deps.at(i)->print_on(st, true);
431     }
432   }
433 }
434 
// Constructs a record bound to `klass`, pinning the klass's java mirror with
// a global JNI handle so the holder cannot be unloaded while training data
// refers to it.
KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  if (holder() == klass) {
    return;   // no change to make
  }

  jobject hmj = _holder_mirror;
  if (hmj != nullptr) {   // clear out previous handle, if any
    _holder_mirror = nullptr;
    assert(JNIHandles::is_global_handle(hmj), "");
    JNIHandles::destroy_global(hmj);
  }

  if (klass != nullptr) {
    Handle hm(JavaThread::current(), klass->java_mirror());
    hmj = JNIHandles::make_global(hm);
    Atomic::release_store(&_holder_mirror, hmj);
  }

  // Release ordering: the mirror handle is published before the holder, so a
  // reader that sees the holder also sees the handle keeping it alive.
  Atomic::release_store(&_holder, const_cast<InstanceKlass*>(klass));
  assert(holder() == klass, "");
}
456 
// Called when the holder klass reaches fully-initialized state: notifies
// every dependent compilation record so it can decrement its
// outstanding-deps counter, then marks the klass as processed.
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left(this);
  }
  holder()->set_has_init_deps_processed();
}
470 
// Builds the dump-time list of records to archive. When dumping the final
// static archive the already-archived records are carried over; otherwise a
// snapshot of the live training-data set is taken under the lock.
// CompileTrainingData records are not appended directly -- they are reachable
// from their owning MethodTrainingData (see _last_toplevel_compiles).
void TrainingData::init_dumptime_table(TRAPS) {
  if (!need_data()) {
    return;
  }
  _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
  if (CDSConfig::is_dumping_final_static_archive()) {
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  } else {
    TrainingDataLocker l;
    // Freeze the set: no further mutation is expected after this point.
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });

    if (VerifyTrainingData) {
      training_data_set()->verify();
    }
  }

  RecompilationSchedule::prepare(CHECK);
}
500 
501 #if INCLUDE_CDS
502 void TrainingData::iterate_roots(MetaspaceClosure* it) {
503   if (!need_data()) {
504     return;
505   }
506   assert(_dumptime_training_data_dictionary != nullptr, "");
507   for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
508     _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
509   }
510   RecompilationSchedule::iterate_roots(it);
511 }
512 
513 void TrainingData::dump_training_data() {
514   if (!need_data()) {
515     return;
516   }
517   write_training_data_dictionary(&_archived_training_data_dictionary_for_dumping);
518 }
519 
// Dump-time cleanup: first lets every record drop references to excluded or
// unloaded classes (which empties the record's key), then compacts the
// dump-time list in place, removing records whose keys became empty.
void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}
544 
// Clears this record's holder if the class was never loaded or is excluded
// from the archive; the emptied key causes the record to be dropped later by
// cleanup_training_data(). Dependent compilations are cleaned recursively.
void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
    if (is_excluded) {
      ResourceMark rm;
      log_debug(cds)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}
563 
// Drops the counters/profile snapshots and the holder when the declaring
// class is excluded from the archive; the emptied key causes the record to be
// dropped later by cleanup_training_data(). Compilation records are cleaned
// recursively.
void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) {
      log_debug(cds)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      // A profile whose method pointer no longer matches the holder indicates
      // a stale snapshot; warn but discard it either way.
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(cds)("Stale MDO for  %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}
588 
589 void KlassTrainingData::verify() {
590   for (int i = 0; i < comp_dep_count(); i++) {
591     CompileTrainingData* ctd = comp_dep(i);
592     if (!ctd->_init_deps.contains(this)) {
593       print_on(tty); tty->cr();
594       ctd->print_on(tty); tty->cr();
595     }
596     guarantee(ctd->_init_deps.contains(this), "");
597   }
598 }
599 
// Checks every compilation record of this method: its cross-links must be
// consistent and the cached init_deps_left counter must match a full recount.
void MethodTrainingData::verify() {
  iterate_compiles([](CompileTrainingData* ctd) {
    ctd->verify();

    int init_deps_left1 = ctd->init_deps_left();
    int init_deps_left2 = ctd->compute_init_deps_left();

    if (init_deps_left1 != init_deps_left2) {
      ctd->print_on(tty); tty->cr();
    }
    // The third value re-reads the counter -- presumably to expose a
    // concurrent update happening between the comparison and the failure.
    guarantee(init_deps_left1 == init_deps_left2, "mismatch: %d %d %d",
              init_deps_left1, init_deps_left2, ctd->init_deps_left());
  });
}
614 
// Checks the init-deps/comp-deps cross-links and warns about deps on
// unregistered shared classes (their keys cannot be looked up at run time;
// see compute_init_deps_left()).
void CompileTrainingData::verify() {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->is_shared_unregistered_class()) {
      LogStreamHandle(Warning, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      // Dump both records before failing to aid diagnosis.
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }
}
633 
// Cleanup is delegated to the owning method record; the visitor guards
// against processing shared records twice.
void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}
641 
642 void TrainingData::serialize_training_data(SerializeClosure* soc) {
643   if (soc->writing()) {
644     _archived_training_data_dictionary_for_dumping.serialize_header(soc);
645   } else {
646     _archived_training_data_dictionary.serialize_header(soc);
647   }
648   RecompilationSchedule::serialize_training_data(soc);
649 }
650 
651 class TrainingDataPrinter : StackObj {
652   outputStream* _st;
653   int _index;
654 public:
655   TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
656   void do_value(TrainingData* td) {
657     const char* type = (td->is_KlassTrainingData()   ? "K" :
658                         td->is_MethodTrainingData()  ? "M" :
659                         td->is_CompileTrainingData() ? "C" : "?");
660     _st->print("%4d: %p %s ", _index++, td, type);
661     td->print_on(_st);
662     _st->cr();
663     if (td->is_KlassTrainingData()) {
664       td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
665         ResourceMark rm;
666         _st->print_raw("  C ");
667         ctd->print_on(_st);
668         _st->cr();
669       });
670     } else if (td->is_MethodTrainingData()) {
671       td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
672         ResourceMark rm;
673         _st->print_raw("  C ");
674         ctd->print_on(_st);
675         _st->cr();
676       });
677     } else if (td->is_CompileTrainingData()) {
678       // ?
679     }
680   }
681 };
682 
// Dumps the archived dictionary (and the recompilation schedule) in
// human-readable form for diagnostics.
void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  // NOTE(review): the locker is initialized here -- presumably this printing
  // path can run before TrainingData::initialize(); confirm.
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}
690 
// The key wraps a Metadata pointer; push it so it gets relocated with the archive.
void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}
694 
// Base implementation: only the key's metadata pointer needs relocation;
// subclasses add their own fields.
void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}
698 
// A stable CDS hash exists only when the key's metadata is in shared
// metaspace (or absent).
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
}
702 
// Hashes the key's metadata address the same way the shared dictionary does,
// so lookups agree between dump time and run time.
uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}
706 
// Writes the dump-time records into a CompactHashtable keyed by the CDS hash
// of each record's key; values are stored as archive-buffer offsets.
void TrainingData::write_training_data_dictionary(TrainingDataDictionary* dictionary) {
  if (!need_data()) {
    return;
  }
  assert(_dumptime_training_data_dictionary != nullptr, "");
  CompactHashtableStats stats;
  dictionary->reset();
  CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
  for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
    TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
    // No two records in the dump-time list may share a key.
    for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
      TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
      assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
    }
#endif // ASSERT
    // Hash and offset are computed from the buffered (relocated) copy.
    td = ArchiveBuilder::current()->get_buffered_addr(td);
    uint hash = TrainingData::Key::cds_hash(td->key());
    u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
    writer.add(hash, delta);
  }
  writer.dump(dictionary, "training data dictionary");
}
730 
731 size_t TrainingData::estimate_size_for_archive() {
732   if (_dumptime_training_data_dictionary != nullptr) {
733     return CompactHashtableWriter::estimate_size(_dumptime_training_data_dictionary->length());
734   } else {
735     return 0;
736   }
737 }
738 
// Looks a record up in the archived dictionary. Returns null when the key has
// no computable CDS hash or the dictionary is empty.
TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    // Only klass/method records with live holders are expected in the archive.
    if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}
756 #endif
757 
// Relocates the archived (Array-backed) portion of the dependency list.
template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}
762 
// Pushes all archivable pointers of this record: the key's metadata, the
// comp-deps array, and the holder klass.
void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(KlassTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}
771 
// Pushes all archivable pointers of this record: key metadata, holder klass
// record, holder Method, per-level compile records, and the final
// profile/counters snapshots.
void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(MethodTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}
785 
// Pushes all archivable pointers of this record: key metadata, both
// dependency lists, and the owning method record.
void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(CompileTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}
795 
// Copies the growable (dump-time) dependency list into a fixed Array so it
// can be archived; a nop if already converted or never populated.
// NOTE(review): loader_data is currently unused here -- confirm whether the
// allocation is intended to be attributed to it.
template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}
806 
807 #if INCLUDE_CDS
// Strips runtime-only state before archiving: the JNI mirror handle is not
// archivable, and the comp-deps list drops its dynamic portion.
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _holder_mirror = nullptr;
  _comp_deps.remove_unshareable_info();
}
813 
// Strips runtime-only state from the snapshotted counters and profile data
// before they are written to the archive.
void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}
823 
// Strips runtime-only state from both dependency lists and resets the
// outstanding-deps counter to count every dep (at run time it is decremented
// again as classes initialize; see dec_init_deps_left()).
void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}
830 
831 #endif // INCLUDE_CDS