/*
 * Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciMetadata.hpp"
#include "cds/archiveBuilder.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/metaspaceShared.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "compiler/compileTask.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "oops/method.hpp"
#include "oops/methodCounters.hpp"
#include "oops/recompilationSchedule.hpp"
#include "oops/trainingData.hpp"
#include "runtime/arguments.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "utilities/growableArray.hpp"

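// Global tables backing the training-data machinery:
//  - _training_data_set is the in-memory hash set populated during a training run;
//  - _archived_training_data_dictionary is the read-only CompactHashtable loaded from the CDS archive;
//  - _archived_training_data_dictionary_for_dumping and _dumptime_training_data_dictionary are the
//    staging structures used while writing a new archive.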
TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;

MethodTrainingData::MethodTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

KlassTrainingData::KlassTrainingData() {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}

void TrainingData::initialize() {
  // this is a nop if training modes are not enabled
  if (have_data() || need_data()) {
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
}

#if INCLUDE_CDS
static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
  guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
  TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
  guarantee(td == td1, "");
}
#endif

void TrainingData::verify() {
#if INCLUDE_CDS
  if (TrainingData::have_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify();
      } else if (td->is_CompileTrainingData()) {
        td->as_CompileTrainingData()->verify();
      }
    });
  }
#endif
}

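// Returns the MethodTrainingData for 'method', creating it on demand unless
// 'null_if_not_found' is set. The fast path uses the pointer cached in the
// MethodCounters; otherwise the archived dictionary is consulted, then the
// live training data set (under the TrainingDataLocker), and a new record is
// allocated and installed if nothing is found.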
MethodTrainingData* MethodTrainingData::make(const methodHandle& method,
                                             bool null_if_not_found) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr) {
      return mtd;
    }
  } else {
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  KlassTrainingData* holder = KlassTrainingData::make(method->method_holder(), null_if_not_found);
  if (holder == nullptr) {
    return nullptr; // allocation failure
  }
  Key key(method());
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  if (td != nullptr) {
    mtd = td->as_MethodTrainingData();
    method->init_training_data(mtd);  // Cache the pointer for next time.
    return mtd;
  } else {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr && null_if_not_found) {
      return nullptr;
    }
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
      method->init_training_data(mtd); // Cache the pointer for next time.
      return mtd;
    }
  }
  assert(td == nullptr && mtd == nullptr && !null_if_not_found, "should have returned if a result was found");
  KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
  if (ktd != nullptr) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      mtd = MethodTrainingData::allocate(method(), ktd);
      if (mtd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(mtd);
      assert(td == mtd, "");
    } else {
      mtd = td->as_MethodTrainingData();
    }
    method->init_training_data(mtd);
  }
  return mtd;
}

void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    _klass->print_on(st, true);
    st->print(".");
    name()->print_symbol_on(st);
    signature()->print_symbol_on(st);
  }
  if (name_only) {
    return;
  }
  if (!has_holder()) {
    st->print("[SYM]");
  }
  if (_level_mask) {
    st->print(" LM%d", _level_mask);
  }
  st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
}

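// Creates a CompileTrainingData record for a top-level compilation task and
// links it into the method's _last_toplevel_compiles slot for the task's
// compilation level (index level - 1), keeping only the record with the most
// recent compile id per level.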
CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    if (mtd->_last_toplevel_compiles[level - 1] != nullptr) {
      if (mtd->_last_toplevel_compiles[level - 1]->compile_id() < compile_id) {
        mtd->_last_toplevel_compiles[level - 1]->clear_init_deps();
        mtd->_last_toplevel_compiles[level - 1] = ctd;
        mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level);
      }
    } else {
      mtd->_last_toplevel_compiles[level - 1] = ctd;
      mtd->_highest_top_level = MAX2(mtd->_highest_top_level, level);
    }
  }
  return ctd;
}


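// Called when one of this compilation's initialization dependencies (ktd) has
// just become fully initialized; atomically decrements the count of
// dependencies that are still pending.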
void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
  LogStreamHandle(Trace, training) log;
  if (log.is_enabled()) {
    log.print("CTD "); print_on(&log); log.cr();
    log.print("KTD "); ktd->print_on(&log); log.cr();
  }
  assert(ktd != nullptr && ktd->has_holder(), "");
  assert(_init_deps.contains(ktd), "");
  assert(_init_deps_left > 0, "");

  uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);

  if (log.is_enabled()) {
    uint init_deps_left2 = compute_init_deps_left();
    log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
    ktd->print_on(&log, true);
  }
}

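// Recomputes the number of initialization dependencies whose holder class is
// not yet initialized (or counts all of them, if 'count_initialized' is true).
// Used for verification and when recording the value into the archive.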
uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->is_shared_unregistered_class()) {
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left; // FIXME: !!! init tracking doesn't work well for custom loaders !!!
        }
      }
    }
  }
  return left;
}

void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
  _method->print_on(st, true);
  st->print("#%dL%d", _compile_id, _level);
  if (name_only) {
    return;
  }
  #define MAYBE_TIME(Q, _qtime) \
    if (_qtime != 0) st->print(" " #Q "%.3f", _qtime)
  MAYBE_TIME(Q, _qtime);
  MAYBE_TIME(S, _stime);
  MAYBE_TIME(E, _etime);
  if (_init_deps.length() > 0) {
    if (_init_deps_left > 0) {
      st->print(" udeps=%d", _init_deps_left);
    }
    for (int i = 0, len = _init_deps.length(); i < len; i++) {
      st->print(" dep:");
      _init_deps.at(i)->print_on(st, true);
    }
  }
}

void CompileTrainingData::record_compilation_queued(CompileTask* task) {
  _qtime = tty->time_stamp().seconds();
}
void CompileTrainingData::record_compilation_start(CompileTask* task) {
  _stime = tty->time_stamp().seconds();
}
void CompileTrainingData::record_compilation_end(CompileTask* task) {
  _etime = tty->time_stamp().seconds();
  if (task->is_success()) {   // record something about the nmethod output
    _nm_total_size = task->nm_total_size();
  }
}
void CompileTrainingData::notice_inlined_method(CompileTask* task,
                                                const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::make(method);
  if (mtd != nullptr) {
    mtd->notice_compilation(task->comp_level(), true);
  }
}

void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  CompileTask* task = env->task();
  assert(task != nullptr, "");
  Method* method = task->method();
  InstanceKlass* compiling_klass = method->method_holder();
  if (what->is_metadata()) {
    ciMetadata* md = what->as_metadata();
    if (md->is_loaded() && md->is_instance_klass()) {
      ciInstanceKlass* cik = md->as_instance_klass();

      if (cik->is_initialized()) {
        InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
        KlassTrainingData* ktd = KlassTrainingData::make(ik);
        if (ktd == nullptr) {
          // Allocation failure or snapshot in progress
          return;
        }
        // This JIT task is (probably) requesting that ik be initialized,
        // so add it to our _init_deps list.
        TrainingDataLocker l;
        add_init_dep(ktd);
      }
    }
  }
}

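// The prepare() methods run once per record (guarded by the Visitor) before
// dumping: they snapshot the final counters/profile of each method and
// convert the dynamically grown dependency lists into fixed arrays via
// DepList<T>::prepare().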
void KlassTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  ClassLoaderData* loader_data = nullptr;
  if (_holder != nullptr) {
    loader_data = _holder->class_loader_data();
  } else {
    loader_data = java_lang_ClassLoader::loader_data(SystemDictionary::java_system_loader()); // default CLD
  }
  _comp_deps.prepare(loader_data);
}

void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  klass()->prepare(visitor);
  if (has_holder()) {
    _final_counters = holder()->method_counters();
    _final_profile  = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
  }
  for (int i = 0; i < CompLevel_count; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}

void CompileTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->prepare(visitor);
  ClassLoaderData* loader_data = _method->klass()->class_loader_data();
  _init_deps.prepare(loader_data);
  _ci_records.prepare(loader_data);
}

KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    }
  }
  TrainingDataLocker l;
  td = training_data_set()->find(&key);
  if (td == nullptr) {
    if (null_if_not_found) {
      return nullptr;
    }
    ktd = KlassTrainingData::allocate(holder);
    if (ktd == nullptr) {
      return nullptr; // allocation failure
    }
    td = training_data_set()->install(ktd);
    assert(ktd == td, "");
  } else {
    ktd = td->as_KlassTrainingData();
    guarantee(ktd->holder() != nullptr, "null holder");
  }
  assert(ktd != nullptr, "");
  guarantee(ktd->holder() == holder, "");
  return ktd;
}

void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated:            st->print("[A]"); break;
      case InstanceKlass::loaded:               st->print("[D]"); break;
      case InstanceKlass::linked:               st->print("[L]"); break;
      case InstanceKlass::being_initialized:    st->print("[i]"); break;
      case InstanceKlass::fully_initialized:    /*st->print("");*/ break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}

KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  if (holder() == klass) {
    return;   // no change to make
  }

  jobject hmj = _holder_mirror;
  if (hmj != nullptr) {   // clear out previous handle, if any
    _holder_mirror = nullptr;
    assert(JNIHandles::is_global_handle(hmj), "");
    JNIHandles::destroy_global(hmj);
  }

  // Keep the klass alive during the training run, unconditionally.
  //
  // FIXME: Revisit this decision; we could allow training runs to
  // unload classes in the normal way.  We might use make_weak_global
  // instead of make_global.
  //
  // The data from the training run would mention the name of the
  // unloaded class (and of its loader).  Is it worth the complexity
  // to track and then unload classes, remembering just their names?

  if (klass != nullptr) {
    Handle hm(JavaThread::current(), klass->java_mirror());
    hmj = JNIHandles::make_global(hm);
    Atomic::release_store(&_holder_mirror, hmj);
  }

  Atomic::release_store(&_holder, const_cast<InstanceKlass*>(klass));
  assert(holder() == klass, "");
}

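// Invoked when the holder class reaches the fully_initialized state: every
// compilation record that depends on this class is told to decrement its
// pending init-deps counter.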
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left(this);
  }
  holder()->set_has_init_deps_processed();
}

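// Builds the dump-time list of records to be written to the archive. When
// dumping the final static archive the already-archived records are reused;
// otherwise the live training data set is snapshotted and prepared. Note that
// CompileTrainingData records are not added directly; they are reached
// through their owning MethodTrainingData.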
void TrainingData::init_dumptime_table(TRAPS) {
  if (!need_data()) {
    return;
  }
  _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
  if (CDSConfig::is_dumping_final_static_archive()) {
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  } else {
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();

    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate_all([&](const TrainingData::Key* k, TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });

    if (VerifyTrainingData) {
      training_data_set()->verify();
    }
  }

  RecompilationSchedule::prepare(CHECK);
}

#if INCLUDE_CDS
void TrainingData::iterate_roots(MetaspaceClosure* it) {
  if (!need_data()) {
    return;
  }
  assert(_dumptime_training_data_dictionary != nullptr, "");
  for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
    _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
  }
  RecompilationSchedule::iterate_roots(it);
}

void TrainingData::dump_training_data() {
  if (!need_data()) {
    return;
  }
  write_training_data_dictionary(&_archived_training_data_dictionary_for_dumping);
}

void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}

void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
    if (is_excluded) {
      ResourceMark rm;
      log_debug(cds)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}

void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (SystemDictionaryShared::check_for_exclusion(holder()->method_holder(), nullptr)) {
      log_debug(cds)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        log_warning(cds)("Stale MDO for %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}

void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}

void MethodTrainingData::verify() {
  iterate_all_compiles([](CompileTrainingData* ctd) {
    ctd->verify();

    int init_deps_left1 = ctd->init_deps_left();
    int init_deps_left2 = ctd->compute_init_deps_left();

    if (init_deps_left1 != init_deps_left2) {
      ctd->print_on(tty); tty->cr();
    }
    guarantee(init_deps_left1 == init_deps_left2, "mismatch: %d %d %d",
              init_deps_left1, init_deps_left2, ctd->init_deps_left());
  });
}

void CompileTrainingData::verify() {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->is_shared_unregistered_class()) {
      LogStreamHandle(Warning, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }
}

void CompileTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  method()->cleanup(visitor);
}

void TrainingData::serialize_training_data(SerializeClosure* soc) {
  if (soc->writing()) {
    _archived_training_data_dictionary_for_dumping.serialize_header(soc);
  } else {
    _archived_training_data_dictionary.serialize_header(soc);
  }
  RecompilationSchedule::serialize_training_data(soc);
}

void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}

void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}

void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}

bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
}

uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}

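// Writes the dump-time records into a CompactHashtable: each entry maps the
// CDS hash of the record's key to the offset of the buffered (relocated) copy
// of the record within the archive buffer.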
void TrainingData::write_training_data_dictionary(TrainingDataDictionary* dictionary) {
  if (!need_data()) {
    return;
  }
  assert(_dumptime_training_data_dictionary != nullptr, "");
  CompactHashtableStats stats;
  dictionary->reset();
  CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
  for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
    TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
    for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
      TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
      assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
    }
#endif // ASSERT
    td = ArchiveBuilder::current()->get_buffered_addr(td);
    uint hash = TrainingData::Key::cds_hash(td->key());
    u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
    writer.add(hash, delta);
  }
  writer.dump(dictionary, "training data dictionary");
}

size_t TrainingData::estimate_size_for_archive() {
  if (_dumptime_training_data_dictionary != nullptr) {
    return CompactHashtableWriter::estimate_size(_dumptime_training_data_dictionary->length());
  } else {
    return 0;
  }
}

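// Looks up a record in the archived dictionary. This only works when every
// component of the key lives in shared metaspace, since the CDS hash is
// derived from the key's metadata address within the shared archive.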
TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}
#endif

KlassTrainingData* TrainingData::lookup_for(InstanceKlass* ik) {
#if INCLUDE_CDS
  if (TrainingData::have_data() && ik != nullptr && ik->is_loaded()) {
    TrainingData::Key key(ik);
    TrainingData* td = TrainingData::lookup_archived_training_data(&key);
    if (td != nullptr && td->is_KlassTrainingData()) {
      return td->as_KlassTrainingData();
    }
  }
#endif
  return nullptr;
}

MethodTrainingData* TrainingData::lookup_for(Method* m) {
#if INCLUDE_CDS
  if (TrainingData::have_data() && m != nullptr) {
    KlassTrainingData* holder_ktd = TrainingData::lookup_for(m->method_holder());
    if (holder_ktd != nullptr) {
      TrainingData::Key key(m);
      TrainingData* td = TrainingData::lookup_archived_training_data(&key);
      if (td != nullptr && td->is_MethodTrainingData()) {
        return td->as_MethodTrainingData();
      }
    }
  }
#endif
  return nullptr;
}

template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}

void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(KlassTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}

void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(MethodTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}

void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(cds)("Iter(CompileTrainingData): %p", this);
#if INCLUDE_CDS
  TrainingData::metaspace_pointers_do(iter);
#endif
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}

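// Copies the elements of the dynamically grown _deps_dyn list into a fixed
// Array<T> (_deps), which is the representation the archiving code iterates
// over via metaspace_pointers_do().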
template <typename T>
void TrainingData::DepList<T>::prepare(ClassLoaderData* loader_data) {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}

void TrainingDataPrinter::do_value(TrainingData* td) {
#ifdef ASSERT
#if INCLUDE_CDS
  TrainingData::Key key(td->key()->meta());
  assert(td == TrainingData::archived_training_data_dictionary()->lookup(td->key(), TrainingData::Key::cds_hash(td->key()), -1), "");
  assert(td == TrainingData::archived_training_data_dictionary()->lookup(&key, TrainingData::Key::cds_hash(&key), -1), "");
#endif
#endif // ASSERT

  const char* type = (td->is_KlassTrainingData()   ? "K" :
                      td->is_MethodTrainingData()  ? "M" :
                      td->is_CompileTrainingData() ? "C" : "?");
  _st->print("%4d: %p %s ", _index++, td, type);
  td->print_on(_st);
  _st->cr();
  if (td->is_KlassTrainingData()) {
    td->as_KlassTrainingData()->iterate_all_comp_deps([&](CompileTrainingData* ctd) {
      ResourceMark rm;
      _st->print_raw("  C ");
      ctd->print_on(_st);
      _st->cr();
    });
  } else if (td->is_MethodTrainingData()) {
    td->as_MethodTrainingData()->iterate_all_compiles([&](CompileTrainingData* ctd) {
      ResourceMark rm;
      _st->print_raw("  C ");
      ctd->print_on(_st);
      _st->cr();
    });
  } else if (td->is_CompileTrainingData()) {
    // ?
  }
}


#if INCLUDE_CDS
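// The remove_unshareable_info() methods strip run-time-only state before the
// records are copied into the archive. Note that _init_deps_left is recomputed
// with count_initialized = true, since class initialization observed during
// the training run must happen afresh in the run that loads the archive.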
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _holder_mirror = nullptr;
  _comp_deps.remove_unshareable_info();
}

void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}

void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}

#endif // INCLUDE_CDS