/*
 * Copyright (c) 2023, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciMetadata.hpp"
#include "cds/archiveBuilder.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/metaspaceShared.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "compiler/compileTask.hpp"
#include "logging/logStream.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "oops/method.hpp"
#include "oops/methodCounters.hpp"
#include "oops/recompilationSchedule.hpp"
#include "oops/trainingData.hpp"
#include "runtime/arguments.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "utilities/growableArray.hpp"

TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;

MethodTrainingData::MethodTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

KlassTrainingData::KlassTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

void TrainingData::initialize() {
  // This is a no-op if the training modes are not enabled.
  if (have_data() || need_data()) {
    // The data structures we have do not currently support iterative training,
    // so it is not possible to replay and train in the same run. Going forward
    // we may want to adjust iteration/search to enable that.
    guarantee(have_data() != need_data(), "Iterative training is not supported");
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
}

#if INCLUDE_CDS
static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}
#endif

void TrainingData::verify() {
#if INCLUDE_CDS
  if (TrainingData::have_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify();
      } else if (td->is_CompileTrainingData()) {
        td->as_CompileTrainingData()->verify();
      }
    });
  }
#endif
}

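// Look up the MethodTrainingData for a method. The MethodCounters cache is consulted
// first; otherwise the archived dictionary (when replaying) and/or the live training
// data set (when recording) are searched, and a new record is created when recording
// unless null_if_not_found is set. The result is cached back in MethodCounters.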
MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // The cached value is stored in MethodCounters; the possible states are:
  // 1. Cached value is null, training_data_lookup_failed == false.
  //    This is the brand new state; a full lookup is needed.
  // 2. Cached value is null, training_data_lookup_failed == true.
  //    The lookup failed the last time; if we don't plan to create a new TD object,
  //    i.e. null_if_not_found == true, just return null.
  // 3. Cached value is not null.
  //    Return it; the value of training_data_lookup_failed doesn't matter.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr) {
      return mtd;
    }
    if (null_if_not_found && mcs->has_training_data_lookup_failed()) {
      assert(mtd == nullptr, "No training data found");
      return nullptr;
    }
  } else if (use_cache) {
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data()) {
#if INCLUDE_CDS
    td = lookup_archived_training_data(&key);
#endif
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else if (mcs != nullptr) {
      assert(td == nullptr, "No training data found");
      mcs->set_training_data_lookup_failed();
    }
  }

  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder(), null_if_not_found);
      if (ktd == nullptr) {
        return nullptr;
      }
      mtd = MethodTrainingData::allocate(method(), ktd);
      if (mtd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(mtd);
      assert(td == mtd, "");
    } else {
      mtd = td->as_MethodTrainingData();
    }
  }

  // Cache the pointer to MTD in MethodCounters for faster lookup
  method->init_training_data(mtd);
  return mtd;
}

void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    _klass->print_on(st, true);
    st->print(".");
    name()->print_symbol_on(st);
    signature()->print_symbol_on(st);
  }
  if (name_only) {
    return;
  }
  if (!has_holder()) {
    st->print("[SYM]");
  }
  if (_level_mask) {
    st->print(" LM%d", _level_mask);
  }
  st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
}

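// Record a new top-level compilation of the method at the given tier. Only the latest
// compilation (by compile id) is kept per tier; the record it replaces has its
// initialization dependencies cleared.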
CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    if (mtd->_last_toplevel_compiles[level - 1] != nullptr) {
      if (mtd->_last_toplevel_compiles[level - 1]->compile_id() < compile_id) {
        mtd->_last_toplevel_compiles[level - 1]->clear_init_deps();
        mtd->_last_toplevel_compiles[level - 1] = ctd;
        mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level);
      }
    } else {
      mtd->_last_toplevel_compiles[level - 1] = ctd;
      mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level);
    }
  }
  return ctd;
}


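// Called when class ktd, one of this compilation's initialization dependencies, has
// finished initializing: atomically decrement the number of dependencies that are
// still outstanding.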
void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd != nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

  uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);

  if (log.is_enabled()) {
    uint init_deps_left2 = compute_init_deps_left();
    log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
    ktd->print_on(&log, true);
  }
}

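// Recount, by walking _init_deps, the initialization dependencies that are not yet
// satisfied (or all of them, if count_initialized is true). Used to cross-check the
// cached _init_deps_left value and to reset it when dumping (see
// remove_unshareable_info()).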
uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->is_shared_unregistered_class()) {
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left; // FIXME: !!! init tracking doesn't work well for custom loaders !!!
        }
      }
    }
  }
  return left;
}

void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
  _method->print_on(st, true);
  st->print("#%dL%d", _compile_id, _level);
  if (name_only) {
    return;
  }
  if (_init_deps.length() > 0) {
    if (_init_deps_left > 0) {
      st->print(" udeps=%d", _init_deps_left);
    }
    for (int i = 0, len = _init_deps.length(); i < len; i++) {
      st->print(" dep:");
      _init_deps.at(i)->print_on(st, true);
    }
  }
}

void CompileTrainingData::notice_inlined_method(CompileTask* task,
                                                const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::make(method);
  if (mtd != nullptr) {
    mtd->notice_compilation(task->comp_level(), true);
  }
}

void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  CompileTask* task = env->task();
  assert(task != nullptr, "");
  Method* method = task->method();
  InstanceKlass* compiling_klass = method->method_holder();
  if (what->is_metadata()) {
    ciMetadata* md = what->as_metadata();
    if (md->is_loaded() && md->is_instance_klass()) {
      ciInstanceKlass* cik = md->as_instance_klass();

      if (cik->is_initialized()) {
        InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
        KlassTrainingData* ktd = KlassTrainingData::make(ik);
        if (ktd == nullptr) {
          // Allocation failure or snapshot in progress
          return;
        }
        // This JIT task is (probably) requesting that ik be initialized,
        // so add it to our _init_deps list.
        TrainingDataLocker l;
        add_init_dep(ktd);
      }
    }
  }
}

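// The prepare() methods are called on every reachable record before the dump-time
// table is built (see init_dumptime_table()). They snapshot the data that goes into
// the archive (final counters and profile, dependency arrays) and recursively prepare
// the records they reference, visiting each record only once.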
void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  ClassLoaderData* loader_data = nullptr;
  if (_holder != nullptr) {
    loader_data = _holder->class_loader_data();
  } else {
    loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
  }
  _comp_deps.prepare(loader_data);
}

void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  klass()->prepare(visitor);
  if (has_holder()) {
    _final_counters = holder()->method_counters();
    _final_profile  = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
  }
  for (int i = 0; i < CompLevel_count; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}

void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  ClassLoaderData* loader_data = _method->klass()->class_loader_data();
  _init_deps.prepare(loader_data);
  _ci_records.prepare(loader_data);
}

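// Look up the KlassTrainingData for a class. An archived entry with a live holder is
// preferred when replaying; when recording, a missing entry is created unless
// null_if_not_found is set.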
KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}

void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated:            st->print("[A]"); break;
      case InstanceKlass::loaded:               st->print("[D]"); break;
      case InstanceKlass::linked:               st->print("[L]"); break;
      case InstanceKlass::being_initialized:    st->print("[i]"); break;
      case InstanceKlass::fully_initialized:    /*st->print("");*/ break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}

KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  if (holder() == klass) {
    return;   // no change to make
  }

  jobject hmj = _holder_mirror;
  if (hmj != nullptr) {   // clear out previous handle, if any
    _holder_mirror = nullptr;
    assert(JNIHandles::is_global_handle(hmj), "");
    JNIHandles::destroy_global(hmj);
  }

  // Keep the klass alive during the training run, unconditionally.
  //
  // FIXME: Revisit this decision; we could allow training runs to
  // unload classes in the normal way.  We might use make_weak_global
  // instead of make_global.
  //
  // The data from the training run would mention the name of the
  // unloaded class (and of its loader).  Is it worth the complexity
  // to track and then unload classes, remembering just their names?

  if (klass != nullptr) {
    Handle hm(JavaThread::current(), klass->java_mirror());
    hmj = JNIHandles::make_global(hm);
    Atomic::release_store(&_holder_mirror, hmj);
  }

  Atomic::release_store(&_holder, const_cast<InstanceKlass*>(klass));
  assert(holder() == klass, "");
}

void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left(this);
  }
  holder()->set_has_init_deps_processed();
}

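// Build the dump-time dictionary that backs the archived table. For the final static
// archive the already-archived records are copied over; otherwise the live training
// data set is snapshotted under the lock and every record except CompileTrainingData
// (which is reachable from its MethodTrainingData) is appended.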
void TrainingData::init_dumptime_table(TRAPS) {
  if (!need_data()) {
    return;
  }
  _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
  if (CDSConfig::is_dumping_final_static_archive()) {
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  } else {
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });

    if (VerifyTrainingData) {
      training_data_set()->verify();
    }
  }

  RecompilationSchedule::prepare(CHECK);
}

#if INCLUDE_CDS
void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (!need_data()) {
    return;
  }
  assert(_dumptime_training_data_dictionary != nullptr, "");
  for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
    _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
  }
  RecompilationSchedule::iterate_roots(it);
}

void TrainingData::dump_training_data() {
  if (!need_data()) {
    return;
  }
  write_training_data_dictionary(&_archived_training_data_dictionary_for_dumping);
}

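// Drop references to classes and methods that are excluded from the archive. The
// cleanup() methods empty the key of any record whose holder is excluded; such
// records are then compacted out of the dump-time dictionary.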
void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}

void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
    if (is_excluded) {
      ResourceMark rm;
      log_debug(cds)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}

void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) {
      log_debug(cds)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(cds)("Stale MDO for  %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}

void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}

void MethodTrainingData::verify() {
  iterate_all_compiles([](CompileTrainingData* ctd) {
    ctd->verify();

    int init_deps_left1 = ctd->init_deps_left();
    int init_deps_left2 = ctd->compute_init_deps_left();

    if (init_deps_left1 != init_deps_left2) {
      ctd->print_on(tty); tty->cr();
    }
    guarantee(init_deps_left1 == init_deps_left2, "mismatch: %d %d %d",
              init_deps_left1, init_deps_left2, ctd->init_deps_left());
  });
}

void CompileTrainingData::verify() {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->is_shared_unregistered_class()) {
      LogStreamHandle(Warning, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }
}

void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}

void TrainingData::serialize_training_data(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
  RecompilationSchedule::serialize_training_data(soc);
}

void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}

void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}

void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}

bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
}

uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}

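// Write the dump-time dictionary as a CompactHashtable keyed by the CDS hash of each
// record's key; each value is the buffer offset of the record's buffered copy.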
void TrainingData::write_training_data_dictionary(TrainingDataDictionary* dictionary) {
  if (!need_data()) {
    return;
  }
  assert(_dumptime_training_data_dictionary != nullptr, "");
  CompactHashtableStats stats;
  dictionary->reset();
  CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
  for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
    TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
    for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
      TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
      assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
    }
#endif // ASSERT
    td = ArchiveBuilder::current()->get_buffered_addr(td);
    uint hash = TrainingData::Key::cds_hash(td->key());
    u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
    writer.add(hash, delta);
  }
  writer.dump(dictionary, "training data dictionary");
}

size_t TrainingData::estimate_size_for_archive() {
  if (_dumptime_training_data_dictionary != nullptr) {
    return CompactHashtableWriter::estimate_size(_dumptime_training_data_dictionary->length());
  } else {
    return 0;
  }
}

TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}
#endif

template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}

void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(KlassTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}

void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(MethodTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}

void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(CompileTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

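// Copy the dynamically grown dependency list (_deps_dyn, a GrowableArray) into a
// fixed Array<T> that metaspace_pointers_do() can push, so the list can be archived.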
template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

void TrainingDataPrinter::do_value(TrainingData* td) {
  const char* type = (td->is_KlassTrainingData()   ? "K" :
                      td->is_MethodTrainingData()  ? "M" :
                      td->is_CompileTrainingData() ? "C" : "?");
  _st->print("%4d: %p %s ", _index++, td, type);
  td->print_on(_st);
  _st->cr();
  if (td->is_KlassTrainingData()) {
    td->as_KlassTrainingData()->iterate_all_comp_deps([&](CompileTrainingData* ctd) {
      ResourceMark rm;
      _st->print_raw("  C ");
      ctd->print_on(_st);
      _st->cr();
    });
  } else if (td->is_MethodTrainingData()) {
    td->as_MethodTrainingData()->iterate_all_compiles([&](CompileTrainingData* ctd) {
      ResourceMark rm;
      _st->print_raw("  C ");
      ctd->print_on(_st);
      _st->cr();
    });
  } else if (td->is_CompileTrainingData()) {
    // ?
  }
}


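// The remove_unshareable_info() methods strip runtime-only state (the holder mirror
// JNI handle, unshareable parts of counters, profiles and dependency lists) in
// preparation for writing the records to the archive.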
#if INCLUDE_CDS
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _holder_mirror = nullptr;
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}

#endif // INCLUDE_CDS