1 /*
  2  * Copyright (c) 2023, 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "ci/ciEnv.hpp"
 26 #include "ci/ciMetadata.hpp"
 27 #include "cds/cdsConfig.hpp"
 28 #include "cds/metaspaceShared.hpp"
 29 #include "classfile/classLoaderData.hpp"
 30 #include "classfile/compactHashtable.hpp"
 31 #include "classfile/javaClasses.hpp"
 32 #include "classfile/symbolTable.hpp"
 33 #include "classfile/systemDictionaryShared.hpp"
 34 #include "compiler/compileTask.hpp"
 35 #include "memory/metadataFactory.hpp"
 36 #include "memory/metaspaceClosure.hpp"
 37 #include "memory/resourceArea.hpp"
 38 #include "oops/method.hpp"
 39 #include "oops/methodCounters.hpp"
 40 #include "oops/recompilationSchedule.hpp"
 41 #include "oops/trainingData.hpp"
 42 #include "runtime/arguments.hpp"
 43 #include "runtime/javaThread.inline.hpp"
 44 #include "runtime/jniHandles.inline.hpp"
 45 #include "utilities/growableArray.hpp"
 46 
// Hashtable of training-data records accumulated while recording (need_data()).
TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
// Read-only dictionary of records loaded from the archive at runtime (have_data()).
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
// Staging dictionary filled by write_training_data_dictionary() when dumping.
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
// Flat list of records gathered at dump time; allocated in init_dumptime_table().
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
// Set by TrainingDataLocker::snapshot() once dump-time snapshotting has begun.
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
 53 
// Default constructor: only used when (de)materializing records for the CDS
// archive; normal records are created via MethodTrainingData::make().
MethodTrainingData::MethodTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 57 
// Default constructor: only used when (de)materializing records for the CDS
// archive; normal records are created via KlassTrainingData::make().
KlassTrainingData::KlassTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 61 
// Default constructor: only used when (de)materializing records for the CDS
// archive. Level/compile-id are set to -1 sentinels until properly assigned.
CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
 65 
 66 void TrainingData::initialize() {
 67   // this is a nop if training modes are not enabled
 68   if (have_data() || need_data()) {
 69     // Data structures that we have do not currently support iterative training. So you cannot replay
 70     // and train at the same time. Going forward we may want to adjust iteration/search to enable that.
 71     guarantee(have_data() != need_data(), "Iterative training is not supported");
 72     TrainingDataLocker::initialize();
 73   }
 74   RecompilationSchedule::initialize();
 75 }
 76 
 77 static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
 78   guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
 79   TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
 80   guarantee(td == td1, "");
 81 }
 82 
// Verify the consistency of the archived dictionary: every record with a
// resolved (loaded) holder must be findable under its own key, and each
// record's internal invariants must hold.
void TrainingData::verify() {
  if (TrainingData::have_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        // Only records whose holder class is loaded have a computable key.
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        // For a method record the key requires the declaring class to be loaded.
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify();
      } else if (td->is_CompileTrainingData()) {
        // Compile records are not keyed in the dictionary; just self-verify.
        td->as_CompileTrainingData()->verify();
      }
    });
  }
}
106 
// Look up (and, unless null_if_not_found, create) the MethodTrainingData for
// 'method'. Returns nullptr when no training mode is active, when the record
// is absent and null_if_not_found is set, or on allocation failure. The
// result is memoized in the method's MethodCounters for fast re-lookup.
MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // Cache value is stored in MethodCounters and the following are the
  // possible states:
  // 1. Cached value is method_training_data_sentinel().
  //    This is an initial state and needs a full lookup.
  // 2. Cached value is null.
  //    Lookup failed the last time, if we don't plan to create a new TD object,
  //    i.e. null_if_no_found == true, then just return a null.
  // 3. Cache value is not null.
  //    Return it, the value of training_data_lookup_failed doesn't matter.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
      return mtd; // state 3: cache hit
    }
    if (null_if_not_found && mtd == nullptr) {
      // state 2: a previous lookup failed and the caller does not want a new record
      // NOTE(review): this assert is trivially true given the condition above.
      assert(mtd == nullptr, "No training data found");
      return nullptr;
    }
  } else if (use_cache) {
    // No counters yet; build them so the result below can be cached.
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data()) {
    // Replay mode: consult the read-only archived dictionary.
    td = lookup_archived_training_data(&key);
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else {
      mtd = nullptr;
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  if (need_data()) {
    // Record mode: consult (and possibly extend) the live table under the lock.
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (!null_if_not_found) {
        // Create the holder-class record first; a method record always hangs
        // off its class record.
        KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
        if (ktd == nullptr) {
          return nullptr; // allocation failure
        }
        mtd = MethodTrainingData::allocate(method(), ktd);
        if (mtd == nullptr) {
          return nullptr; // allocation failure
        }
        td = training_data_set()->install(mtd);
        assert(td == mtd, "");
      } else {
        mtd = nullptr;
      }
    } else {
      mtd = td->as_MethodTrainingData();
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  return mtd;
}
177 
178 void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
179   if (has_holder()) {
180     _klass->print_on(st, true);
181     st->print(".");
182     name()->print_symbol_on(st);
183     signature()->print_symbol_on(st);
184   }
185   if (name_only) {
186     return;
187   }
188   if (!has_holder()) {
189     st->print("[SYM]");
190   }
191   if (_level_mask) {
192     st->print(" LM%d", _level_mask);
193   }
194   st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
195 }
196 
// Create a CompileTrainingData record for a top-level compilation task and
// link it into the method's per-level "last compile" slot. Returns nullptr
// for dynamically generated methods or on allocation failure.
CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    // Slot index is level-1 (levels are 1-based, the array is 0-based).
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      // Keep only the newest compilation per level; drop the superseded
      // record's init deps so they no longer pin anything.
      if (last_ctd->compile_id() < compile_id) {
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}
228 
229 
// Called when the initialization dependency 'ktd' is satisfied; atomically
// decrements the outstanding-dependency counter (atomic because the locker
// may be a no-op when we are not collecting data, see notice_fully_initialized).
void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd!= nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

  uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);

  if (log.is_enabled()) {
    // Cross-check the cached counter against a full recount (trace only).
    uint init_deps_left2 = compute_init_deps_left();
    log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
    ktd->print_on(&log, true);
  }
}
248 
// Recount outstanding initialization dependencies from scratch. A dependency
// counts if its holder is not yet initialized (or always, when
// count_initialized is true), or if it is an initialized unregistered class
// whose key cannot be hashed into the CDS dictionary.
uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->is_shared_unregistered_class()) {
        Key k(holder);
        // Without CDS support the hash can never be computed, so the
        // dependency always remains outstanding.
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left;
        }
      }
    }
  }
  return left;
}
268 
269 void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
270   _method->print_on(st, true);
271   st->print("#%dL%d", _compile_id, _level);
272   if (name_only) {
273     return;
274   }
275   if (_init_deps.length() > 0) {
276     if (_init_deps_left > 0) {
277       st->print(" udeps=%d", _init_deps_left);
278     }
279     for (int i = 0, len = _init_deps.length(); i < len; i++) {
280       st->print(" dep:");
281       _init_deps.at(i)->print_on(st, true);
282     }
283   }
284 }
285 
286 void CompileTrainingData::notice_inlined_method(CompileTask* task,
287                                                 const methodHandle& method) {
288   MethodTrainingData* mtd = MethodTrainingData::make(method);
289   if (mtd != nullptr) {
290     mtd->notice_compilation(task->comp_level(), true);
291   }
292 }
293 
294 void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
295   // A JIT is starting to look at class k.
296   // We could follow the queries that it is making, but it is
297   // simpler to assume, conservatively, that the JIT will
298   // eventually depend on the initialization state of k.
299   CompileTask* task = env->task();
300   assert(task != nullptr, "");
301   Method* method = task->method();
302   InstanceKlass* compiling_klass = method->method_holder();
303   if (what->is_metadata()) {
304     ciMetadata* md = what->as_metadata();
305     if (md->is_loaded() && md->is_instance_klass()) {
306       ciInstanceKlass* cik = md->as_instance_klass();
307 
308       if (cik->is_initialized()) {
309         InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
310         KlassTrainingData* ktd = KlassTrainingData::make(ik);
311         if (ktd == nullptr) {
312           // Allocation failure or snapshot in progress
313           return;
314         }
315         // This JIT task is (probably) requesting that ik be initialized,
316         // so add him to my _init_deps list.
317         TrainingDataLocker l;
318         add_init_dep(ktd);
319       }
320     }
321   }
322 }
323 
324 void KlassTrainingData::prepare(Visitor& visitor) {
325   if (visitor.is_visited(this)) {
326     return;
327   }
328   visitor.visit(this);
329   ClassLoaderData* loader_data = nullptr;
330   if (_holder != nullptr) {
331     loader_data = _holder->class_loader_data();
332   } else {
333     loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
334   }
335   _comp_deps.prepare(loader_data);
336 }
337 
338 void MethodTrainingData::prepare(Visitor& visitor) {
339   if (visitor.is_visited(this)) {
340     return;
341   }
342   visitor.visit(this);
343   klass()->prepare(visitor);
344   if (has_holder()) {
345     _final_counters = holder()->method_counters();
346     _final_profile  = holder()->method_data();
347     assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
348   }
349   for (int i = 0; i < CompLevel_count - 1; i++) {
350     CompileTrainingData* ctd = _last_toplevel_compiles[i];
351     if (ctd != nullptr) {
352       ctd->prepare(visitor);
353     }
354   }
355 }
356 
357 void CompileTrainingData::prepare(Visitor& visitor) {
358   if (visitor.is_visited(this)) {
359     return;
360   }
361   visitor.visit(this);
362   method()->prepare(visitor);
363   ClassLoaderData* loader_data = _method->klass()->class_loader_data();
364   _init_deps.prepare(loader_data);
365   _ci_records.prepare(loader_data);
366 }
367 
// Look up (and, unless null_if_not_found, create) the KlassTrainingData
// record for 'holder'. Checks the archived dictionary first (replay mode),
// then the live table (record mode). Returns nullptr on miss (when
// null_if_not_found) or allocation failure.
KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  // Without CDS compiled in, have_data() lookup is elided entirely.
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      // Symbolic archived record; fall through to the live table.
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}
403 
404 void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
405   if (has_holder()) {
406     name()->print_symbol_on(st);
407     switch (holder()->init_state()) {
408       case InstanceKlass::allocated:            st->print("[A]"); break;
409       case InstanceKlass::loaded:               st->print("[D]"); break;
410       case InstanceKlass::linked:               st->print("[L]"); break;
411       case InstanceKlass::being_initialized:    st->print("[i]"); break;
412       case InstanceKlass::fully_initialized:                      break;
413       case InstanceKlass::initialization_error: st->print("[E]"); break;
414       default: fatal("unknown state: %d", holder()->init_state());
415     }
416     if (holder()->is_interface()) {
417       st->print("I");
418     }
419   } else {
420     st->print("[SYM]");
421   }
422   if (name_only) {
423     return;
424   }
425   if (_comp_deps.length() > 0) {
426     for (int i = 0, len = _comp_deps.length(); i < len; i++) {
427       st->print(" dep:");
428       _comp_deps.at(i)->print_on(st, true);
429     }
430   }
431 }
432 
// Construct a record bound to 'klass'. Keeps the class's java mirror alive
// via a JNI global handle, and publishes the handle and holder pointer with
// release stores so concurrent readers see a fully initialized record.
KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  if (holder() == klass) {
    return;   // no change to make
  }

  jobject hmj = _holder_mirror;
  if (hmj != nullptr) {   // clear out previous handle, if any
    _holder_mirror = nullptr;
    assert(JNIHandles::is_global_handle(hmj), "");
    JNIHandles::destroy_global(hmj);
  }

  if (klass != nullptr) {
    // Pin the mirror so the class (and this record's key) cannot be unloaded.
    Handle hm(JavaThread::current(), klass->java_mirror());
    hmj = JNIHandles::make_global(hm);
    Atomic::release_store(&_holder_mirror, hmj);
  }

  Atomic::release_store(&_holder, const_cast<InstanceKlass*>(klass));
  assert(holder() == klass, "");
}
454 
// Called once this class reaches fully_initialized: tell every dependent
// compilation that this initialization dependency is now satisfied.
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left(this);
  }
  holder()->set_has_init_deps_processed();
}
468 
// Build the flat dump-time list of records to be archived. In the final
// static archive pass the records are copied from the existing archived
// dictionary; otherwise the live table is snapshotted under the lock.
void TrainingData::init_dumptime_table(TRAPS) {
  if (!need_data()) {
    return;
  }
  _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
  if (CDSConfig::is_dumping_final_static_archive()) {
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  } else {
    TrainingDataLocker l;
    // Freeze the live table before iterating it.
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      // Compile records are reachable through their method records and are
      // not listed (or keyed) individually.
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });

    if (AOTVerifyTrainingData) {
      training_data_set()->verify();
    }
  }

  RecompilationSchedule::prepare(CHECK);
}
498 
499 void TrainingData::iterate_roots(MetaspaceClosure* it) {
500   if (!need_data()) {
501     return;
502   }
503   assert(_dumptime_training_data_dictionary != nullptr, "");
504   for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
505     _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
506   }
507   RecompilationSchedule::iterate_roots(it);
508 }
509 
510 void TrainingData::dump_training_data() {
511   if (!need_data()) {
512     return;
513   }
514   write_training_data_dictionary(&_archived_training_data_dictionary_for_dumping);
515 }
516 
// Before dumping, drop records whose classes/methods are excluded from the
// archive: each record clears itself (emptying its key), then the list is
// compacted in place to remove the emptied entries.
void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0; // next write position for surviving entries
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}
541 
// Detach this record from its holder if the class is excluded from the
// archive (or no longer loaded), emptying the key so cleanup_training_data()
// discards it; then propagate cleanup to dependent compile records.
void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
    if (is_excluded) {
      ResourceMark rm;
      log_debug(cds)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}
560 
// Detach this record if its declaring class is excluded from the archive:
// drop the captured counters/MDO and the holder, and empty the key so the
// record gets discarded; then propagate cleanup to the last compiles.
void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) {
      log_debug(cds)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      // A profile pointing at a different method indicates a stale snapshot.
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(cds)("Stale MDO for  %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}
585 
586 void KlassTrainingData::verify() {
587   for (int i = 0; i < comp_dep_count(); i++) {
588     CompileTrainingData* ctd = comp_dep(i);
589     if (!ctd->_init_deps.contains(this)) {
590       print_on(tty); tty->cr();
591       ctd->print_on(tty); tty->cr();
592     }
593     guarantee(ctd->_init_deps.contains(this), "");
594   }
595 }
596 
// Verify every attached compile record and cross-check its cached
// outstanding-dependency counter against a full recount.
void MethodTrainingData::verify() {
  iterate_compiles([](CompileTrainingData* ctd) {
    ctd->verify();

    int init_deps_left1 = ctd->init_deps_left();
    int init_deps_left2 = ctd->compute_init_deps_left();

    if (init_deps_left1 != init_deps_left2) {
      ctd->print_on(tty); tty->cr();
    }
    // The third argument deliberately re-reads init_deps_left() so a
    // concurrent change between the reads shows up in the failure message.
    guarantee(init_deps_left1 == init_deps_left2, "mismatch: %d %d %d",
              init_deps_left1, init_deps_left2, ctd->init_deps_left());
  });
}
611 
// Verify dependency-link symmetry from the compile side: every init-dep
// class must list this compilation among its comp-deps. Also warns (trace
// log only) about dependencies on unregistered classes.
void CompileTrainingData::verify() {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->is_shared_unregistered_class()) {
      LogStreamHandle(Warning, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      // Dump both records before failing the guarantee below.
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }
}
630 
// Cleanup for a compile record just delegates to the owning method record
// (which in turn may empty its key); idempotent via the visitor.
void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}
638 
639 void TrainingData::serialize(SerializeClosure* soc) {
640   if (soc->writing()) {
641     _archived_training_data_dictionary_for_dumping.serialize_header(soc);
642   } else {
643     _archived_training_data_dictionary.serialize_header(soc);
644   }
645   RecompilationSchedule::serialize(soc);
646 }
647 
648 class TrainingDataPrinter : StackObj {
649   outputStream* _st;
650   int _index;
651 public:
652   TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
653   void do_value(TrainingData* td) {
654     const char* type = (td->is_KlassTrainingData()   ? "K" :
655                         td->is_MethodTrainingData()  ? "M" :
656                         td->is_CompileTrainingData() ? "C" : "?");
657     _st->print("%4d: %p %s ", _index++, td, type);
658     td->print_on(_st);
659     _st->cr();
660     if (td->is_KlassTrainingData()) {
661       td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
662         ResourceMark rm;
663         _st->print_raw("  C ");
664         ctd->print_on(_st);
665         _st->cr();
666       });
667     } else if (td->is_MethodTrainingData()) {
668       td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
669         ResourceMark rm;
670         _st->print_raw("  C ");
671         ctd->print_on(_st);
672         _st->cr();
673       });
674     } else if (td->is_CompileTrainingData()) {
675       // ?
676     }
677   }
678 };
679 
// Dump the whole archived dictionary (and the recompilation schedule) to
// 'st' for diagnostics.
void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  // Ensure the locker is usable even when called before TrainingData::initialize().
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}
687 
// Expose the key's metadata pointer to the archive relocator (const_cast is
// needed because keys are logically immutable).
void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}
691 
// Base implementation: only the key holds a metaspace pointer; subclasses
// add their own fields on top of this.
void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}
695 
696 bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
697   return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
698 }
699 
// Hash the key's (shared) metadata address for the compact-hashtable lookup;
// only valid when can_compute_cds_hash() holds.
uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}
703 
// Serialize the dump-time record list into a compact hashtable, keyed by the
// CDS hash of each record's (already buffered) metadata and storing
// buffer-relative offsets.
void TrainingData::write_training_data_dictionary(TrainingDataDictionary* dictionary) {
  if (!need_data()) {
    return;
  }
  assert(_dumptime_training_data_dictionary != nullptr, "");
  CompactHashtableStats stats;
  dictionary->reset();
  CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
  for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
    TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
    // Debug-only O(n^2) check that no two entries share a key.
    for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
      TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
      assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
    }
#endif // ASSERT
    // Hash and store the relocated (buffered) copy, not the live record.
    td = ArchiveBuilder::current()->get_buffered_addr(td);
    uint hash = TrainingData::Key::cds_hash(td->key());
    u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
    writer.add(hash, delta);
  }
  writer.dump(dictionary, "training data dictionary");
}
727 
728 TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
729   // For this to work, all components of the key must be in shared metaspace.
730   if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
731     return nullptr;
732   }
733   uint hash = TrainingData::Key::cds_hash(k);
734   TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
735   if (td != nullptr) {
736     if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
737         (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
738       return td;
739     } else {
740       ShouldNotReachHere();
741     }
742   }
743   return nullptr;
744 }
745 
// Expose the archived dependency array pointer to the archive relocator.
template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}
750 
// Relocation roots for a klass record: base key, comp-dep array, and the
// holder class pointer.
void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}
757 
// Relocation roots for a method record: base key, class record, holder
// method, per-level last compiles, and the captured profile/counters.
void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}
769 
// Relocation roots for a compile record: base key, both dependency lists,
// and the owning method record.
void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}
777 
// Materialize the growable (C-heap) dependency list into an archivable
// Array<T>; a no-op if already materialized or never populated.
template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}
788 
// Strip runtime-only state before archiving: the JNI mirror handle is not
// archivable, and the dependency list drops its dynamic portion.
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _holder_mirror = nullptr;
  _comp_deps.remove_unshareable_info();
}
794 
795 void MethodTrainingData::remove_unshareable_info() {
796   TrainingData::remove_unshareable_info();
797   if (_final_counters != nullptr) {
798     _final_counters->remove_unshareable_info();
799   }
800   if (_final_profile != nullptr) {
801     _final_profile->remove_unshareable_info();
802   }
803 }
804 
// Strip runtime-only state before archiving and recompute the outstanding
// dependency count treating every dependency as unresolved (count_initialized
// = true), since initialization must be re-observed in the next run.
void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}