1 /*
2 * Copyright (c) 2025, 2026, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/aotCompressedPointers.hpp"
26 #include "cds/cdsConfig.hpp"
27 #include "ci/ciEnv.hpp"
28 #include "ci/ciMetadata.hpp"
29 #include "classfile/compactHashtable.hpp"
30 #include "classfile/javaClasses.hpp"
31 #include "classfile/symbolTable.hpp"
32 #include "classfile/systemDictionaryShared.hpp"
33 #include "compiler/compileTask.hpp"
34 #include "memory/metadataFactory.hpp"
35 #include "memory/metaspaceClosure.hpp"
36 #include "memory/resourceArea.hpp"
37 #include "memory/universe.hpp"
38 #include "oops/method.hpp"
39 #include "oops/method.inline.hpp"
40 #include "oops/methodCounters.hpp"
41 #include "oops/recompilationSchedule.hpp"
42 #include "oops/trainingData.hpp"
43 #include "runtime/arguments.hpp"
44 #include "runtime/javaThread.inline.hpp"
45 #include "runtime/jniHandles.inline.hpp"
46 #include "utilities/growableArray.hpp"
47
// Live table of TrainingData records accumulated during a training run.
TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
// Read-only dictionary of records loaded from the AOT/CDS archive.
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
// Staging dictionary built while writing a new archive (see dump_training_data()).
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
// Flat snapshot of records used at dump time; allocated in init_dumptime_table().
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
54
// Default constructor used only when materializing C++ vtables of
// archived objects (cppVtables.cpp); never called on a regular path.
MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
59
// Default constructor used only when materializing C++ vtables of
// archived objects (cppVtables.cpp); never called on a regular path.
KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
64
// Default constructor used only when materializing C++ vtables of
// archived objects (cppVtables.cpp). Level/compile-id are set to -1
// to mark the record as not describing a real compilation.
CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
69
// One-time setup of the training-data subsystem.
void TrainingData::initialize() {
  // this is a nop if training modes are not enabled
  if (have_data() || need_data()) {
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
  if (have_data() && need_data()) {
    // Both replaying archived data and recording new data: seed the live
    // table with the archived records so new observations update them.
    TrainingDataLocker l;
    archived_training_data_dictionary()->iterate_all([&](TrainingData* td) {
      training_data_set()->install(td);
    });
  }
}
83
84 static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
85 guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
86 TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
87 guarantee(td == td1, "");
88 }
89
// Consistency checks over the training data. In a production run the
// archived dictionary is verified; in a training run the live set is.
void TrainingData::verify() {
  if (have_data() && !need_data() && !assembling_data()) {
    // Production run: every archived record with a loaded holder must be
    // findable again through an archived-dictionary lookup.
    archived_training_data_dictionary()->iterate_all([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify(/*verify_dep_counter*/true);
      }
    });
  }
  if (need_data()) {
    // Training run: verify the live records under the lock.
    TrainingDataLocker l;
    training_data_set()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        // During the training run init deps tracking is not setup yet,
        // don't verify it.
        mtd->verify(/*verify_dep_counter*/false);
      }
    });
  }
}
125
// Find (or, when recording, create) the MethodTrainingData for `method`.
// Returns null when training modes are off, when null_if_not_found is set
// and no record exists, or on allocation failure.
MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
  MethodTrainingData* mtd = nullptr;
  if (!have_data() && !need_data()) {
    return mtd;
  }
  // Try grabbing the cached value first.
  // Cache value is stored in MethodCounters and the following are the
  // possible states:
  // 1. Cached value is method_training_data_sentinel().
  // This is an initial state and needs a full lookup.
  // 2. Cached value is null.
  // Lookup failed the last time, if we don't plan to create a new TD object,
  // i.e. null_if_not_found == true, then just return a null.
  // 3. Cache value is not null.
  // Return it, the value of training_data_lookup_failed doesn't matter.
  MethodCounters* mcs = method->method_counters();
  if (mcs != nullptr) {
    mtd = mcs->method_training_data();
    if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
      return mtd;
    }
    if (null_if_not_found && mtd == nullptr) {
      assert(mtd == nullptr, "No training data found");
      return nullptr;
    }
  } else if (use_cache) {
    // No counters yet; build them so the result can be cached below.
    mcs = Method::build_method_counters(Thread::current(), method());
  }

  TrainingData* td = nullptr;

  Key key(method());
  if (have_data() && !need_data()) {
    // Production run: consult the read-only archived dictionary.
    td = lookup_archived_training_data(&key);
    if (td != nullptr) {
      mtd = td->as_MethodTrainingData();
    } else {
      mtd = nullptr;
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  if (need_data()) {
    // Training run: look up (and possibly install) under the lock.
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (!null_if_not_found) {
        KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
        if (ktd == nullptr) {
          return nullptr; // allocation failure
        }
        mtd = MethodTrainingData::allocate(method(), ktd);
        if (mtd == nullptr) {
          return nullptr; // allocation failure
        }
        td = training_data_set()->install(mtd);
        assert(td == mtd, "");
      } else {
        mtd = nullptr;
      }
    } else {
      mtd = td->as_MethodTrainingData();
    }
    // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
    method->init_training_data(mtd);
  }

  return mtd;
}
196
197 void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
198 if (has_holder()) {
199 _klass->print_on(st, true);
200 st->print(".");
201 name()->print_symbol_on(st);
202 signature()->print_symbol_on(st);
203 }
204 if (name_only) {
205 return;
206 }
207 if (!has_holder()) {
208 st->print("[SYM]");
209 }
210 if (_level_mask) {
211 st->print(" LM%d", _level_mask);
212 }
213 st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
214 }
215
// Record a top-level compilation `task`. Allocates a new CTD and, if it is
// the most recent compilation at its tier, remembers it in the method's
// _last_toplevel_compiles slot. Returns null for dynamically generated
// methods and on allocation failure.
CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    // Slot index is level - 1 (levels are 1-based).
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        // A newer compilation at this tier supersedes the previous record.
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}
247
248
249 void CompileTrainingData::dec_init_deps_left_release(KlassTrainingData* ktd) {
250 LogStreamHandle(Trace, training) log;
251 if (log.is_enabled()) {
252 log.print("CTD "); print_on(&log); log.cr();
253 log.print("KTD "); ktd->print_on(&log); log.cr();
254 }
255 assert(ktd!= nullptr && ktd->has_holder(), "");
256 assert(_init_deps.contains(ktd), "");
257 assert(_init_deps_left > 0, "");
258
259 uint init_deps_left1 = AtomicAccess::sub(&_init_deps_left, 1);
260
261 if (log.is_enabled()) {
262 uint init_deps_left2 = compute_init_deps_left();
263 log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
264 ktd->print_on(&log, true);
265 }
266 }
267
// Recount the number of initialization dependencies that are still
// outstanding. With count_initialized == true, already-initialized holders
// are counted as well (used to reset the counter at dump time, see
// remove_unshareable_info()).
uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->defined_by_other_loaders()) {
        // Initialized but defined by a non-builtin loader: still counts as
        // outstanding when its key cannot be hashed for the CDS dictionary.
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left;
        }
      }
    }
  }
  return left;
}
287
288 void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
289 _method->print_on(st, true);
290 st->print("#%dL%d", _compile_id, _level);
291 if (name_only) {
292 return;
293 }
294 if (_init_deps.length() > 0) {
295 if (_init_deps_left > 0) {
296 st->print(" udeps=%d", _init_deps_left);
297 }
298 for (int i = 0, len = _init_deps.length(); i < len; i++) {
299 st->print(" dep:");
300 _init_deps.at(i)->print_on(st, true);
301 }
302 }
303 }
304
305 void CompileTrainingData::notice_inlined_method(CompileTask* task,
306 const methodHandle& method) {
307 MethodTrainingData* mtd = MethodTrainingData::make(method);
308 if (mtd != nullptr) {
309 mtd->notice_compilation(task->comp_level(), true);
310 }
311 }
312
// Record that the JIT looked at metadata `what` during this compilation,
// conservatively treating it as an initialization dependency.
void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  ciMetadata* md = nullptr;
  if (what->is_object()) {
    md = what->as_object()->klass();
  } else if (what->is_metadata()) {
    md = what->as_metadata();
  }
  if (md != nullptr && md->is_loaded() && md->is_instance_klass()) {
    ciInstanceKlass* cik = md->as_instance_klass();
    // Only classes that are already initialized are recorded as deps.
    if (!cik->is_initialized()) {
      return;
    }
    KlassTrainingData* ktd = KlassTrainingData::make(cik->get_instanceKlass());
    if (ktd == nullptr) {
      // Allocation failure or snapshot in progress
      return;
    }
    // This JIT task is (probably) requesting that ik be initialized,
    // so add it to my _init_deps list.
    TrainingDataLocker l;
    if (l.can_add()) {
      add_init_dep(ktd);
    }
  }
}
342
343 void KlassTrainingData::prepare(Visitor& visitor) {
344 if (visitor.is_visited(this)) {
345 return;
346 }
347 visitor.visit(this);
348 _comp_deps.prepare();
349 }
350
351 void MethodTrainingData::prepare(Visitor& visitor) {
352 if (visitor.is_visited(this)) {
353 return;
354 }
355 visitor.visit(this);
356 klass()->prepare(visitor);
357 if (has_holder()) {
358 _final_counters = holder()->method_counters();
359 _final_profile = holder()->method_data();
360 assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
361 _invocation_count = holder()->invocation_count();
362 _backedge_count = holder()->backedge_count();
363 _aot_code_invocation_limit = _final_counters->jit_code_invocation_count();
364 }
365 for (int i = 0; i < CompLevel_count - 1; i++) {
366 CompileTrainingData* ctd = _last_toplevel_compiles[i];
367 if (ctd != nullptr) {
368 ctd->prepare(visitor);
369 }
370 }
371 }
372
373 void CompileTrainingData::prepare(Visitor& visitor) {
374 if (visitor.is_visited(this)) {
375 return;
376 }
377 visitor.visit(this);
378 method()->prepare(visitor);
379 _init_deps.prepare();
380 _ci_records.prepare();
381 }
382
// Find (or, when recording, create) the KlassTrainingData for `holder`.
// In a production run the archived dictionary is consulted first; in a
// training run a missing record is allocated and installed unless
// null_if_not_found is set.
KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  TrainingData* td = CDS_ONLY((have_data() && !need_data()) ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      // Symbolic-only archived record; treat as not found.
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}
418
// Print the class name followed by a one-letter init-state tag:
// [A]llocated, loa[D]ed, [L]inked, being_[i]nitialized, nothing when fully
// initialized, [E]rror; "I" marks interfaces. "[SYM]" marks records with no
// resolved holder. Unless name_only, also print compilation dependencies.
void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated:            st->print("[A]"); break;
      case InstanceKlass::loaded:               st->print("[D]"); break;
      case InstanceKlass::linked:               st->print("[L]"); break;
      case InstanceKlass::being_initialized:    st->print("[i]"); break;
      case InstanceKlass::fully_initialized:                      break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}
447
// Construct a record for a live class. A global OopHandle keeps the class
// mirror (and thus the class) alive for the lifetime of the VM.
KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  assert(klass != nullptr, "");
  // The OopHandle constructor will allocate a handle. We don't need to ever release it so we don't preserve
  // the handle object.
  OopHandle handle(Universe::vm_global(), klass->java_mirror());
  _holder = klass;
  assert(holder() == klass, "");
}
456
// Called when the holder class reaches the fully-initialized state:
// release one init dependency on every compilation record that depends on
// this class, then mark the holder as processed.
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left_release(this);
  }
  holder()->set_has_init_deps_processed();
}
470
// Build the flat dump-time list of records to archive. When assembling from
// an existing archive, copy the archived records; when recording, snapshot
// the live set (freezing further additions) and prepare each record.
// CompileTrainingData records are reachable via their methods and are not
// appended directly.
void TrainingData::init_dumptime_table(TRAPS) {
  if (assembling_data() && !need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate_all([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();
    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });
  }

  RecompilationSchedule::prepare(CHECK);

  if (AOTVerifyTrainingData) {
    TrainingData::verify();
  }
}
498
499 void TrainingData::iterate_roots(MetaspaceClosure* it) {
500 if (_dumptime_training_data_dictionary != nullptr) {
501 for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
502 _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
503 }
504 }
505 RecompilationSchedule::iterate_roots(it);
506 }
507
// Write the dump-time records into the compact hashtable that becomes the
// archived training-data dictionary. Keys are hashed via Key::cds_hash on
// the buffered (relocated) records.
void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      // No two records may share a key.
      for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      // Hash and store the copy that lives in the archive buffer.
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      writer.add(hash, AOTCompressedPointers::encode_not_null(td));
    }
    writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");
  }
}
528
// Drop records that must not be archived: let each record clean itself
// (which may empty its key), then compact the dump-time list in place,
// removing all records whose keys became empty.
void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0;
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}
553
// Detach this record from its holder if the class is unloaded or excluded
// from the AOT archive; an emptied key causes the record to be dropped by
// cleanup_training_data(). Recursively cleans dependent compilation records.
void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded();
    if (CDSConfig::is_at_aot_safepoint()) {
      // Check for AOT exclusion only at AOT safe point.
      is_excluded |= SystemDictionaryShared::should_be_excluded(holder());
    }
    if (is_excluded) {
      ResourceMark rm;
      log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}
576
// Detach this record from its holder if the holder's class is excluded from
// the AOT archive; an emptied key causes the record to be dropped by
// cleanup_training_data(). Recursively cleans the per-tier compile records.
void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (CDSConfig::is_at_aot_safepoint() && SystemDictionaryShared::should_be_excluded(holder()->method_holder())) {
      // Check for AOT exclusion only at AOT safe point.
      log_debug(aot, training)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        // The captured MDO no longer belongs to this method.
        log_warning(aot, training)("Stale MDO for %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}
602
603 void KlassTrainingData::verify() {
604 for (int i = 0; i < comp_dep_count(); i++) {
605 CompileTrainingData* ctd = comp_dep(i);
606 if (!ctd->_init_deps.contains(this)) {
607 print_on(tty); tty->cr();
608 ctd->print_on(tty); tty->cr();
609 }
610 guarantee(ctd->_init_deps.contains(this), "");
611 }
612 }
613
// Verify every compilation record attached to this method; the dep counter
// is only checked when requested (it is not maintained during training).
void MethodTrainingData::verify(bool verify_dep_counter) {
  iterate_compiles([&](CompileTrainingData* ctd) {
    ctd->verify(verify_dep_counter);
  });
}
619
// Verify bidirectional linkage with every init dependency and (optionally)
// that the cached dependency counter never undercounts the recomputed one.
void CompileTrainingData::verify(bool verify_dep_counter) {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->defined_by_other_loaders()) {
      // Informational only: dependency on a class from a non-builtin loader.
      LogStreamHandle(Info, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      // Dump both sides of the broken back-link before failing.
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }

  if (verify_dep_counter) {
    // The cached counter may run ahead of the recomputed value, never behind.
    int init_deps_left1 = init_deps_left_acquire();
    int init_deps_left2 = compute_init_deps_left();

    bool invariant = (init_deps_left1 >= init_deps_left2);
    if (!invariant) {
      print_on(tty);
      tty->cr();
    }
    guarantee(invariant, "init deps invariant violation: %d >= %d", init_deps_left1, init_deps_left2);
  }
}
650
651 void CompileTrainingData::cleanup(Visitor& visitor) {
652 if (visitor.is_visited(this)) {
653 return;
654 }
655 visitor.visit(this);
656 method()->cleanup(visitor);
657 }
658
659 void TrainingData::serialize(SerializeClosure* soc) {
660 if (soc->writing()) {
661 _archived_training_data_dictionary_for_dumping.serialize_header(soc);
662 } else {
663 _archived_training_data_dictionary.serialize_header(soc);
664 }
665 RecompilationSchedule::serialize(soc);
666 }
667
// Closure that pretty-prints each dictionary entry with a running index and
// a one-letter type tag (K/M/C), followed by its compilation records.
class TrainingDataPrinter : StackObj {
  outputStream* _st;     // destination stream
  int _index;            // running entry number
public:
  TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
  void do_value(TrainingData* td) {
    const char* type = (td->is_KlassTrainingData()   ? "K" :
                        td->is_MethodTrainingData()  ? "M" :
                        td->is_CompileTrainingData() ? "C" : "?");
    _st->print("%4d: %p %s ", _index++, td, type);
    td->print_on(_st);
    _st->cr();
    if (td->is_KlassTrainingData()) {
      // Indented list of compilations depending on this class.
      td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_MethodTrainingData()) {
      // Indented list of this method's compilations.
      td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
        ResourceMark rm;
        _st->print_raw("  C ");
        ctd->print_on(_st);
        _st->cr();
      });
    } else if (td->is_CompileTrainingData()) {
      // ?
    }
  }
};
699
// Dump the full archived dictionary (diagnostic output).
void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  // NOTE(review): initializes the locker here, presumably so printing works
  // even when TrainingData::initialize() has not run — confirm.
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate_all(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}
707
// Expose the key's Metadata pointer for archive relocation.
void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}
711
// Base-class contribution: only the key holds a metaspace pointer.
void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}
715
// A key can be hashed for the CDS dictionary only if its metadata is null
// or already resides in the AOT cache.
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::in_aot_cache(k->meta());
}
719
// Hash for the archived dictionary, derived from the metadata's address
// (stable because the metadata lives in the shared archive).
uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}
723
// Look up `k` in the read-only archived dictionary. Returns null when the
// key cannot be hashed (components not all in the AOT cache), when the
// dictionary is empty, or when no entry exists.
TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  assert(!need_data(), "Should be used only in read-only mode");
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    // Only klass/method records with resolved holders are ever archived.
    if ((td->is_KlassTrainingData() && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}
742
// Expose the archived dependency array for relocation.
template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}
747
// Relocation support: key, compilation deps, and the holder class pointer.
void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}
754
// Relocation support: key, klass record, holder method, per-tier compile
// records, and the captured profile/counters.
void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}
766
// Relocation support: key, both dependency lists, and the owning method.
void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}
774
// Convert the dynamically grown dependency list into a fixed Array<T>
// suitable for archiving. Idempotent: no-op once _deps is set.
template <typename T>
void TrainingData::DepList<T>::prepare() {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
    // NOTE(review): the old growable list is dropped, not freed —
    // presumably acceptable for a once-per-dump conversion; confirm.
    _deps_dyn = nullptr;
  }
}
786
// Strip runtime-only state before archiving, including the dep list's.
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}
791
// Strip runtime-only state before archiving, including the captured
// counters and profile (both optional).
void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}
801
// Strip runtime-only state before archiving and reset the dependency
// counter to cover every dependency (count_initialized == true), since
// initialization state starts fresh in the run that loads the archive.
void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}