1 /*
2 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/cdsConfig.hpp"
26 #include "ci/ciEnv.hpp"
27 #include "ci/ciMetadata.hpp"
28 #include "classfile/compactHashtable.hpp"
29 #include "classfile/javaClasses.hpp"
30 #include "classfile/symbolTable.hpp"
31 #include "classfile/systemDictionaryShared.hpp"
32 #include "compiler/compileTask.hpp"
33 #include "memory/metadataFactory.hpp"
34 #include "memory/metaspaceClosure.hpp"
35 #include "memory/resourceArea.hpp"
36 #include "memory/universe.hpp"
37 #include "oops/method.hpp"
38 #include "oops/method.inline.hpp"
39 #include "oops/methodCounters.hpp"
40 #include "oops/trainingData.hpp"
41 #include "runtime/arguments.hpp"
42 #include "runtime/javaThread.inline.hpp"
43 #include "runtime/jniHandles.inline.hpp"
44 #include "utilities/growableArray.hpp"
45
// Live set of training data records collected during a training (recording) run.
TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
// Read-only dictionary of training data restored from the AOT/CDS archive.
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
// Staging dictionary populated while a new archive is being written (see dump_training_data()).
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
// Linear list of records gathered at dump time; null until init_dumptime_table() runs.
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
// Set once a snapshot of the training data has been taken (see TrainingDataLocker::snapshot()).
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
52
// Default constructor: used only when materializing C++ vtables for archived
// objects, hence the CDS-only assert.
MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
57
// Default constructor: used only when materializing C++ vtables for archived
// objects, hence the CDS-only assert.
KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
62
// Default constructor: used only when materializing C++ vtables for archived
// objects. Level and compile id are set to -1 as "not a real compile" markers.
CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
67
68 void TrainingData::initialize() {
69 // this is a nop if training modes are not enabled
70 if (have_data() || need_data()) {
71 // Data structures that we have do not currently support iterative training. So you cannot replay
72 // and train at the same time. Going forward we may want to adjust iteration/search to enable that.
73 guarantee(have_data() != need_data(), "Iterative training is not supported");
74 TrainingDataLocker::initialize();
75 }
76 }
77
78 static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
79 guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
80 TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
81 guarantee(td == td1, "");
82 }
83
// Consistency checks over both the archived dictionary (replay mode) and the
// in-memory training set (recording mode).
void TrainingData::verify() {
  if (TrainingData::have_data() && !TrainingData::assembling_data()) {
    archived_training_data_dictionary()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        // A record with a loaded holder must be findable again by its key.
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify(/*verify_dep_counter*/true);
      }
    });
  }
  if (TrainingData::need_data()) {
    // The live set requires the locker while iterating.
    TrainingDataLocker l;
    training_data_set()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        // During the training run init deps tracking is not setup yet,
        // don't verify it.
        mtd->verify(/*verify_dep_counter*/false);
      }
    });
  }
}
119
// Finds or creates the MethodTrainingData record for 'method'.
// With have_data() the archived dictionary is consulted; with need_data() a
// new record is installed in the live training set (unless null_if_not_found
// is true). Returns null when training is off, lookup fails and creation is
// suppressed, or allocation fails.
MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // Cache value is stored in MethodCounters and the following are the
  // possible states:
  // 1. Cached value is method_training_data_sentinel().
  //    This is an initial state and needs a full lookup.
  // 2. Cached value is null.
  //    Lookup failed the last time, if we don't plan to create a new TD object,
  //    i.e. null_if_not_found == true, then just return a null.
  // 3. Cache value is not null.
  //    Return it, the value of training_data_lookup_failed doesn't matter.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
      return mtd; // state 3: cache hit
    }
    if (null_if_not_found && mtd == nullptr) {
      assert(mtd == nullptr, "No training data found");
      return nullptr; // state 2: negative cache hit
    }
  } else if (use_cache) {
    // No counters yet; build them so the result of this lookup can be cached.
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data()) {
    td = lookup_archived_training_data(&key);
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else {
      mtd = nullptr;
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (!null_if_not_found) {
        // Ensure the holder klass has a record first, then allocate and
        // install a new method record under the lock.
        KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
        if (ktd == nullptr) {
          return nullptr; // allocation failure
        }
        mtd = MethodTrainingData::allocate(method(), ktd);
        if (mtd == nullptr) {
          return nullptr; // allocation failure
        }
        td = training_data_set()->install(mtd);
        assert(td == mtd, "");
      } else {
        mtd = nullptr;
      }
    } else {
      mtd = td->as_MethodTrainingData();
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  return mtd;
}
190
191 void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
192 if (has_holder()) {
193 _klass->print_on(st, true);
194 st->print(".");
195 name()->print_symbol_on(st);
196 signature()->print_symbol_on(st);
197 }
198 if (name_only) {
199 return;
200 }
201 if (!has_holder()) {
202 st->print("[SYM]");
203 }
204 if (_level_mask) {
205 st->print(" LM%d", _level_mask);
206 }
207 st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
208 }
209
// Creates a CompileTrainingData record for this compile task and links it as
// the most recent top-level compile of its level on the method's record.
// Returns null for dynamically generated methods or on allocation failure.
CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    // Track the latest top-level compile per tier (levels are 1-based).
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        // The newer compile supersedes the previous one; drop its init deps.
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}
241
242
243 void CompileTrainingData::dec_init_deps_left_release(KlassTrainingData* ktd) {
244 LogStreamHandle(Trace, training) log;
245 if (log.is_enabled()) {
246 log.print("CTD "); print_on(&log); log.cr();
247 log.print("KTD "); ktd->print_on(&log); log.cr();
248 }
249 assert(ktd!= nullptr && ktd->has_holder(), "");
250 assert(_init_deps.contains(ktd), "");
251 assert(_init_deps_left > 0, "");
252
253 uint init_deps_left1 = AtomicAccess::sub(&_init_deps_left, 1);
254
255 if (log.is_enabled()) {
256 uint init_deps_left2 = compute_init_deps_left();
257 log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
258 ktd->print_on(&log, true);
259 }
260 }
261
262 uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
263 int left = 0;
264 for (int i = 0; i < _init_deps.length(); i++) {
265 KlassTrainingData* ktd = _init_deps.at(i);
266 // Ignore symbolic refs and already initialized classes (unless explicitly requested).
267 if (ktd->has_holder()) {
268 InstanceKlass* holder = ktd->holder();
269 if (!ktd->holder()->is_initialized() || count_initialized) {
270 ++left;
271 } else if (holder->defined_by_other_loaders()) {
272 Key k(holder);
273 if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
274 ++left;
275 }
276 }
277 }
278 }
279 return left;
280 }
281
282 void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
283 _method->print_on(st, true);
284 st->print("#%dL%d", _compile_id, _level);
285 if (name_only) {
286 return;
287 }
288 if (_init_deps.length() > 0) {
289 if (_init_deps_left > 0) {
290 st->print(" udeps=%d", _init_deps_left);
291 }
292 for (int i = 0, len = _init_deps.length(); i < len; i++) {
293 st->print(" dep:");
294 _init_deps.at(i)->print_on(st, true);
295 }
296 }
297 }
298
299 void CompileTrainingData::notice_inlined_method(CompileTask* task,
300 const methodHandle& method) {
301 MethodTrainingData* mtd = MethodTrainingData::make(method);
302 if (mtd != nullptr) {
303 mtd->notice_compilation(task->comp_level(), true);
304 }
305 }
306
307 void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
308 // A JIT is starting to look at class k.
309 // We could follow the queries that it is making, but it is
310 // simpler to assume, conservatively, that the JIT will
311 // eventually depend on the initialization state of k.
312 CompileTask* task = env->task();
313 assert(task != nullptr, "");
314 Method* method = task->method();
315 if (what->is_metadata()) {
316 ciMetadata* md = what->as_metadata();
317 if (md->is_loaded() && md->is_instance_klass()) {
318 ciInstanceKlass* cik = md->as_instance_klass();
319
320 if (cik->is_initialized()) {
321 InstanceKlass* ik = md->as_instance_klass()->get_instanceKlass();
322 KlassTrainingData* ktd = KlassTrainingData::make(ik);
323 if (ktd == nullptr) {
324 // Allocation failure or snapshot in progress
325 return;
326 }
327 // This JIT task is (probably) requesting that ik be initialized,
328 // so add him to my _init_deps list.
329 TrainingDataLocker l;
330 if (l.can_add()) {
331 add_init_dep(ktd);
332 }
333 }
334 }
335 }
336 }
337
338 void KlassTrainingData::prepare(Visitor& visitor) {
339 if (visitor.is_visited(this)) {
340 return;
341 }
342 visitor.visit(this);
343 _comp_deps.prepare();
344 }
345
// Prepares this record for archiving: snapshots final counters/profile from
// the live Method and transitively prepares dependent records.
void MethodTrainingData::prepare(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return; // already prepared in this pass
  }
  visitor.visit(this);
  // The holder klass record must be prepared as well.
  klass()->prepare(visitor);
  if (has_holder()) {
    // Capture the method's counters and profile data as they stand now.
    _final_counters = holder()->method_counters();
    _final_profile = holder()->method_data();
    assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
    _invocation_count = holder()->invocation_count();
    _backedge_count = holder()->backedge_count();
  }
  // Prepare the last top-level compile record recorded for each tier.
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->prepare(visitor);
    }
  }
}
366
367 void CompileTrainingData::prepare(Visitor& visitor) {
368 if (visitor.is_visited(this)) {
369 return;
370 }
371 visitor.visit(this);
372 method()->prepare(visitor);
373 _init_deps.prepare();
374 _ci_records.prepare();
375 }
376
// Finds or creates the KlassTrainingData record for 'holder'. An archived
// record with a live holder is preferred; otherwise, with need_data(), a new
// record is installed in the live set (unless null_if_not_found is true).
KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY(have_data() ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      // Archived record has no holder; fall through and possibly create a
      // fresh one below.
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}
412
void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    // One-letter tag for the class's init state; fully initialized prints nothing.
    switch (holder()->init_state()) {
      case InstanceKlass::allocated: st->print("[A]"); break;
      case InstanceKlass::loaded: st->print("[D]"); break;
      case InstanceKlass::linked: st->print("[L]"); break;
      case InstanceKlass::being_initialized: st->print("[i]"); break;
      case InstanceKlass::fully_initialized: break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]"); // symbolic-only record (holder was cleaned up)
  }
  if (name_only) {
    return;
  }
  // Full form also lists the dependent compile records.
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}
441
// Constructs a record keyed on 'klass' and pins the klass's mirror so the
// holder cannot be unloaded while training data refers to it.
KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  assert(klass != nullptr, "");
  // The OopHandle constructor will allocate a handle. We don't need to ever release it so we don't preserve
  // the handle object.
  OopHandle handle(Universe::vm_global(), klass->java_mirror());
  _holder = klass;
  assert(holder() == klass, "");
}
450
// Called when the holder class reaches the fully-initialized state: releases
// one init dependency on every compile record that depends on this class.
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left_release(this);
  }
  // Mark the class so this processing is not repeated.
  holder()->set_has_init_deps_processed();
}
464
// Builds the dump-time list of records to archive. In assembly mode records
// come from the existing archive; in recording mode they come from the live
// training set (CompileTrainingData records are reached through their owners
// and are not appended directly).
void TrainingData::init_dumptime_table(TRAPS) {
  // Assembling and recording are mutually exclusive (at most one is active).
  precond((!assembling_data() && !need_data()) || need_data() != assembling_data());
  if (assembling_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    // Freeze the live set: no further additions while the snapshot is taken.
    TrainingDataLocker::snapshot();
    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });
  }

  if (AOTVerifyTrainingData) {
    TrainingData::verify();
  }
}
491
492 void TrainingData::iterate_roots(MetaspaceClosure* it) {
493 if (_dumptime_training_data_dictionary != nullptr) {
494 for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
495 _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
496 }
497 }
498 }
499
// Writes the dump-time records into the compact hashtable stored in the
// archive. Uses ArchiveBuilder::current(), so it is only valid while an
// archive is being assembled.
void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      // Keys must be unique across the whole table.
      for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      // Hash the buffered copy's key and store its buffer-relative offset.
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
      writer.add(hash, delta);
    }
    writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");
  }
}
521
// Cleans every dump-time record (detaching holders of excluded/unloaded
// classes), then compacts the list by dropping records whose keys became
// empty during cleanup.
void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0; // next write position for the in-place compaction
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
}
545
// Detaches this record from its holder if the class is unloaded or excluded
// from the AOT archive, emptying its key so cleanup_training_data() drops it.
void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return; // already cleaned in this pass
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded();
    if (CDSConfig::is_at_aot_safepoint()) {
      // Check for AOT exclusion only at AOT safe point.
      is_excluded |= SystemDictionaryShared::should_be_excluded(holder());
    }
    if (is_excluded) {
      ResourceMark rm;
      log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  // Cascade to the compile records that depend on this class.
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}
568
// Detaches this record from its holder method when the declaring class is
// excluded from the AOT archive; also drops the captured counters/profile.
void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return; // already cleaned in this pass
  }
  visitor.visit(this);
  if (has_holder()) {
    if (CDSConfig::is_at_aot_safepoint() && SystemDictionaryShared::should_be_excluded(holder()->method_holder())) {
      // Check for AOT exclusion only at AOT safe point.
      log_debug(aot, training)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        // Profile no longer belongs to this method; flag it before dropping.
        log_warning(aot, training)("Stale MDO for  %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  // Cascade to the compile records hanging off this method.
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}
594
// Checks the bidirectional link invariant: every compile record that this
// class lists as a dependent must list this class among its init deps.
void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      // Dump both records before failing to aid diagnosis.
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}
605
// Verifies every compile record attached to this method; the flag is
// forwarded to CompileTrainingData::verify().
void MethodTrainingData::verify(bool verify_dep_counter) {
  iterate_compiles([&](CompileTrainingData* ctd) {
    ctd->verify(verify_dep_counter);
  });
}
611
// Checks the reverse link invariant (every init dep lists this compile as a
// dependent) and, when requested, that the cached unresolved-deps counter is
// never below the recomputed value.
void CompileTrainingData::verify(bool verify_dep_counter) {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->defined_by_other_loaders()) {
      // Informational only: a dependency on an unregistered class.
      LogStreamHandle(Info, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      // Dump both records before failing to aid diagnosis.
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }

  if (verify_dep_counter) {
    // The cached counter may lag behind (decrements race with recomputation)
    // but must never undercount.
    int init_deps_left1 = init_deps_left_acquire();
    int init_deps_left2 = compute_init_deps_left();

    bool invariant = (init_deps_left1 >= init_deps_left2);
    if (!invariant) {
      print_on(tty);
      tty->cr();
    }
    guarantee(invariant, "init deps invariant violation: %d >= %d", init_deps_left1, init_deps_left2);
  }
}
642
643 void CompileTrainingData::cleanup(Visitor& visitor) {
644 if (visitor.is_visited(this)) {
645 return;
646 }
647 visitor.visit(this);
648 method()->cleanup(visitor);
649 }
650
651 void TrainingData::serialize(SerializeClosure* soc) {
652 if (soc->writing()) {
653 _archived_training_data_dictionary_for_dumping.serialize_header(soc);
654 } else {
655 _archived_training_data_dictionary.serialize_header(soc);
656 }
657 }
658
// Closure for print_archived_training_data_on(): prints one indexed line per
// record, tagged K/M/C by kind, plus the compile records reachable from
// klass and method entries.
class TrainingDataPrinter : StackObj {
  outputStream* _st;  // destination stream
  int _index;         // running record index
public:
  TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
  void do_value(TrainingData* td) {
    // Single-letter kind tag; "?" should not occur for well-formed records.
    const char* type = (td->is_KlassTrainingData()   ? "K" :
                        td->is_MethodTrainingData()  ? "M" :
                        td->is_CompileTrainingData() ? "C" : "?");
    _st->print("%4d: %p %s ", _index++, td, type);
    td->print_on(_st);
    _st->cr();
    if (td->is_KlassTrainingData()) {
      // Also list the compile records that depend on this class.
      td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_MethodTrainingData()) {
      // Also list the compile records attached to this method.
      td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_CompileTrainingData()) {
      // ?
    }
  }
};
690
// Dumps the whole archived dictionary in human-readable form.
void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  // Make sure the locker is initialized before iterating.
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate(&tdp);
}
697
// The key wraps a single Metadata pointer; hand it to the closure so it can
// be relocated during archiving (const_cast because _meta is declared const).
void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}
701
// Base implementation: the only metaspace pointer is inside the key.
// Subclasses extend this with their own fields.
void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}
705
// A CDS hash is only computable when the key's metadata is null or already
// resides in the AOT cache (its address is then stable).
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::in_aot_cache(k->meta());
}
709
// Hashes the key's metadata address for the shared-dictionary lookup;
// only valid when can_compute_cds_hash() holds.
uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}
713
// Looks up a record in the archived dictionary by key. Returns null when the
// key cannot be hashed or the dictionary is empty; a found record must be a
// klass or method record with a live holder.
TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData()  && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      // Archived records without holders should never be reachable by lookup.
      ShouldNotReachHere();
    }
  }
  return nullptr;
}
731
// Only the archived (Array-based) representation participates in relocation;
// the dynamic GrowableArray exists only at record time.
template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}
736
// Relocation support: base fields, the dependent-compile list, and the holder.
void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}
743
// Relocation support: base fields plus every metaspace pointer this record
// owns (klass record, holder Method, per-tier compiles, counters, profile).
void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}
755
// Relocation support: base fields, both dependency lists, and the owning
// method record.
void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}
763
// Converts the dynamic (GrowableArray) dependency list into the fixed C-heap
// Array form used for archiving. No-op if already converted or if no dynamic
// list was ever created.
template <typename T>
void TrainingData::DepList<T>::prepare() {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
  }
}
774
// Strips run-specific state before archiving: base fields plus the
// dependent-compile list.
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}
779
// Strips run-specific state before archiving, including from the captured
// counters and profile if present.
void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}
789
// Strips run-specific state before archiving and seeds the unresolved-deps
// counter with the full dependency count (count_initialized == true), since
// classes must re-initialize in the next run.
void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}