1 /*
2 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/cdsConfig.hpp"
26 #include "ci/ciEnv.hpp"
27 #include "ci/ciMetadata.hpp"
28 #include "classfile/compactHashtable.hpp"
29 #include "classfile/javaClasses.hpp"
30 #include "classfile/symbolTable.hpp"
31 #include "classfile/systemDictionaryShared.hpp"
32 #include "compiler/compileTask.hpp"
33 #include "memory/metadataFactory.hpp"
34 #include "memory/metaspaceClosure.hpp"
35 #include "memory/resourceArea.hpp"
36 #include "memory/universe.hpp"
37 #include "oops/method.hpp"
38 #include "oops/method.inline.hpp"
39 #include "oops/methodCounters.hpp"
40 #include "oops/recompilationSchedule.hpp"
41 #include "oops/trainingData.hpp"
42 #include "runtime/arguments.hpp"
43 #include "runtime/javaThread.inline.hpp"
44 #include "runtime/jniHandles.inline.hpp"
45 #include "utilities/growableArray.hpp"
46
// Live, process-wide table of TrainingData records populated while recording.
TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
// Read-only dictionary mapped in from the AOT/CDS archive at startup.
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
// Staging dictionary written by dump_training_data() when creating an archive.
TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
// Flat snapshot of records to archive; null until init_dumptime_table() runs.
TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
int TrainingData::TrainingDataLocker::_lock_mode;
volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
53
// Default constructor exists solely so cppVtables.cpp can materialize the
// C++ vtable for archived instances; it is never used to build a real record.
MethodTrainingData::MethodTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
58
// Default constructor exists solely so cppVtables.cpp can materialize the
// C++ vtable for archived instances; it is never used to build a real record.
KlassTrainingData::KlassTrainingData() {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
63
// Default constructor exists solely so cppVtables.cpp can materialize the
// C++ vtable for archived instances; level/compile-id are set to sentinels.
CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
  // Used by cppVtables.cpp only
  assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
}
68
// One-time setup for the training data subsystem. Initializes the lock only
// when training data is in play, and, when both replaying and recording
// (have_data() && need_data()), seeds the live set with the archived records
// so new observations extend the previous training run.
void TrainingData::initialize() {
  // this is a nop if training modes are not enabled
  if (have_data() || need_data()) {
    TrainingDataLocker::initialize();
  }
  RecompilationSchedule::initialize();
  if (have_data() && need_data()) {
    TrainingDataLocker l;
    archived_training_data_dictionary()->iterate_all([&](TrainingData* td) {
      training_data_set()->install(td);
    });
  }
}
82
83 static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
84 guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
85 TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
86 guarantee(td == td1, "");
87 }
88
// Verifies both views of the training data: in pure replay runs, every
// archived record with a loaded holder must round-trip through the archived
// dictionary; in recording runs, the live records are checked under the lock.
void TrainingData::verify() {
  if (have_data() && !need_data() && !assembling_data()) {
    // Replay-only mode: walk the archived dictionary.
    archived_training_data_dictionary()->iterate_all([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        if (ktd->has_holder() && ktd->holder()->is_loaded()) {
          Key k(ktd->holder());
          verify_archived_entry(td, &k);
        }
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        if (mtd->has_holder() && mtd->holder()->method_holder()->is_loaded()) {
          Key k(mtd->holder());
          verify_archived_entry(td, &k);
        }
        mtd->verify(/*verify_dep_counter*/true);
      }
    });
  }
  if (need_data()) {
    // Recording mode: check the live set; the lock keeps it stable.
    TrainingDataLocker l;
    training_data_set()->iterate([&](TrainingData* td) {
      if (td->is_KlassTrainingData()) {
        KlassTrainingData* ktd = td->as_KlassTrainingData();
        ktd->verify();
      } else if (td->is_MethodTrainingData()) {
        MethodTrainingData* mtd = td->as_MethodTrainingData();
        // During the training run init deps tracking is not setup yet,
        // don't verify it.
        mtd->verify(/*verify_dep_counter*/false);
      }
    });
  }
}
124
125 MethodTrainingData* MethodTrainingData::make(const methodHandle& method, bool null_if_not_found, bool use_cache) {
126 MethodTrainingData* mtd = nullptr;
127 if (!have_data() && !need_data()) {
128 return mtd;
129 }
130 // Try grabbing the cached value first.
131 // Cache value is stored in MethodCounters and the following are the
132 // possible states:
133 // 1. Cached value is method_training_data_sentinel().
134 // This is an initial state and needs a full lookup.
135 // 2. Cached value is null.
136 // Lookup failed the last time, if we don't plan to create a new TD object,
137 // i.e. null_if_no_found == true, then just return a null.
138 // 3. Cache value is not null.
139 // Return it, the value of training_data_lookup_failed doesn't matter.
140 MethodCounters* mcs = method->method_counters();
141 if (mcs != nullptr) {
142 mtd = mcs->method_training_data();
143 if (mtd != nullptr && mtd != mcs->method_training_data_sentinel()) {
144 return mtd;
145 }
146 if (null_if_not_found && mtd == nullptr) {
147 assert(mtd == nullptr, "No training data found");
148 return nullptr;
149 }
150 } else if (use_cache) {
151 mcs = Method::build_method_counters(Thread::current(), method());
152 }
153
154 TrainingData* td = nullptr;
155
156 Key key(method());
157 if (have_data() && !need_data()) {
158 td = lookup_archived_training_data(&key);
159 if (td != nullptr) {
160 mtd = td->as_MethodTrainingData();
161 } else {
162 mtd = nullptr;
163 }
164 // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
165 method->init_training_data(mtd);
166 }
167
168 if (need_data()) {
169 TrainingDataLocker l;
170 td = training_data_set()->find(&key);
171 if (td == nullptr) {
172 if (!null_if_not_found) {
173 KlassTrainingData* ktd = KlassTrainingData::make(method->method_holder());
174 if (ktd == nullptr) {
175 return nullptr; // allocation failure
176 }
177 mtd = MethodTrainingData::allocate(method(), ktd);
178 if (mtd == nullptr) {
179 return nullptr; // allocation failure
180 }
181 td = training_data_set()->install(mtd);
182 assert(td == mtd, "");
183 } else {
184 mtd = nullptr;
185 }
186 } else {
187 mtd = td->as_MethodTrainingData();
188 }
189 // Cache the pointer to MTD in MethodCounters for faster lookup (could be null if not found)
190 method->init_training_data(mtd);
191 }
192
193 return mtd;
194 }
195
196 void MethodTrainingData::print_on(outputStream* st, bool name_only) const {
197 if (has_holder()) {
198 _klass->print_on(st, true);
199 st->print(".");
200 name()->print_symbol_on(st);
201 signature()->print_symbol_on(st);
202 }
203 if (name_only) {
204 return;
205 }
206 if (!has_holder()) {
207 st->print("[SYM]");
208 }
209 if (_level_mask) {
210 st->print(" LM%d", _level_mask);
211 }
212 st->print(" mc=%p mdo=%p", _final_counters, _final_profile);
213 }
214
// Records a top-level compilation: ensures the MethodTrainingData exists,
// notes the compilation level, and allocates a CompileTrainingData which
// becomes the method's latest top-level compile at that level if its compile
// id is newer than the one currently recorded. Returns null for dynamically
// generated methods or on allocation failure.
CompileTrainingData* CompileTrainingData::make(CompileTask* task) {
  int level = task->comp_level();
  int compile_id = task->compile_id();
  Thread* thread = Thread::current();
  methodHandle m(thread, task->method());
  if (m->method_holder() == nullptr) {
    return nullptr; // do not record (dynamically generated method)
  }
  MethodTrainingData* mtd = MethodTrainingData::make(m);
  if (mtd == nullptr) {
    return nullptr; // allocation failure
  }
  mtd->notice_compilation(level);

  TrainingDataLocker l;
  CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
  if (ctd != nullptr) {
    // _last_toplevel_compiles is indexed by level - 1.
    CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
    if (last_ctd != nullptr) {
      assert(mtd->highest_top_level() >= level, "consistency");
      if (last_ctd->compile_id() < compile_id) {
        // A newer compilation at this level supersedes the previous record.
        last_ctd->clear_init_deps();
        last_ctd = ctd;
      }
    } else {
      last_ctd = ctd;
      mtd->notice_toplevel_compilation(level);
    }
  }
  return ctd;
}
246
247
248 void CompileTrainingData::dec_init_deps_left_release(KlassTrainingData* ktd) {
249 LogStreamHandle(Trace, training) log;
250 if (log.is_enabled()) {
251 log.print("CTD "); print_on(&log); log.cr();
252 log.print("KTD "); ktd->print_on(&log); log.cr();
253 }
254 assert(ktd!= nullptr && ktd->has_holder(), "");
255 assert(_init_deps.contains(ktd), "");
256 assert(_init_deps_left > 0, "");
257
258 uint init_deps_left1 = AtomicAccess::sub(&_init_deps_left, 1);
259
260 if (log.is_enabled()) {
261 uint init_deps_left2 = compute_init_deps_left();
262 log.print("init_deps_left: %d (%d)", init_deps_left1, init_deps_left2);
263 ktd->print_on(&log, true);
264 }
265 }
266
// Recomputes, from scratch, how many init dependencies are still pending:
// deps whose holder is not yet initialized (or all holders when
// count_initialized is true), plus initialized holders from non-boot loaders
// whose key cannot be hashed for CDS lookup. Symbolic-only deps are skipped.
uint CompileTrainingData::compute_init_deps_left(bool count_initialized) {
  int left = 0;
  for (int i = 0; i < _init_deps.length(); i++) {
    KlassTrainingData* ktd = _init_deps.at(i);
    // Ignore symbolic refs and already initialized classes (unless explicitly requested).
    if (ktd->has_holder()) {
      InstanceKlass* holder = ktd->holder();
      if (!ktd->holder()->is_initialized() || count_initialized) {
        ++left;
      } else if (holder->defined_by_other_loaders()) {
        // Initialized, but defined by a custom loader: only countable if its
        // key is CDS-hashable; without CDS it always counts as pending.
        Key k(holder);
        if (CDS_ONLY(!Key::can_compute_cds_hash(&k)) NOT_CDS(true)) {
          ++left;
        }
      }
    }
  }
  return left;
}
286
287 void CompileTrainingData::print_on(outputStream* st, bool name_only) const {
288 _method->print_on(st, true);
289 st->print("#%dL%d", _compile_id, _level);
290 if (name_only) {
291 return;
292 }
293 if (_init_deps.length() > 0) {
294 if (_init_deps_left > 0) {
295 st->print(" udeps=%d", _init_deps_left);
296 }
297 for (int i = 0, len = _init_deps.length(); i < len; i++) {
298 st->print(" dep:");
299 _init_deps.at(i)->print_on(st, true);
300 }
301 }
302 }
303
304 void CompileTrainingData::notice_inlined_method(CompileTask* task,
305 const methodHandle& method) {
306 MethodTrainingData* mtd = MethodTrainingData::make(method);
307 if (mtd != nullptr) {
308 mtd->notice_compilation(task->comp_level(), true);
309 }
310 }
311
// Called when the JIT inspects 'what' during this compilation; conservatively
// records an initialization dependency on the observed instance klass.
void CompileTrainingData::notice_jit_observation(ciEnv* env, ciBaseObject* what) {
  // A JIT is starting to look at class k.
  // We could follow the queries that it is making, but it is
  // simpler to assume, conservatively, that the JIT will
  // eventually depend on the initialization state of k.
  ciMetadata* md = nullptr;
  if (what->is_object()) {
    md = what->as_object()->klass();
  } else if (what->is_metadata()) {
    md = what->as_metadata();
  }
  if (md != nullptr && md->is_loaded() && md->is_instance_klass()) {
    ciInstanceKlass* cik = md->as_instance_klass();
    if (!cik->is_initialized()) {
      // Only record deps on classes that are already initialized.
      return;
    }
    KlassTrainingData* ktd = KlassTrainingData::make(cik->get_instanceKlass());
    if (ktd == nullptr) {
      // Allocation failure or snapshot in progress
      return;
    }
    // This JIT task is (probably) requesting that ik be initialized,
    // so add it to my _init_deps list.
    TrainingDataLocker l;
    if (l.can_add()) {
      add_init_dep(ktd);
    }
  }
}
341
342 void KlassTrainingData::prepare(Visitor& visitor) {
343 if (visitor.is_visited(this)) {
344 return;
345 }
346 visitor.visit(this);
347 _comp_deps.prepare();
348 }
349
350 void MethodTrainingData::prepare(Visitor& visitor) {
351 if (visitor.is_visited(this)) {
352 return;
353 }
354 visitor.visit(this);
355 klass()->prepare(visitor);
356 if (has_holder()) {
357 _final_counters = holder()->method_counters();
358 _final_profile = holder()->method_data();
359 assert(_final_profile == nullptr || _final_profile->method() == holder(), "");
360 _invocation_count = holder()->invocation_count();
361 _backedge_count = holder()->backedge_count();
362 }
363 for (int i = 0; i < CompLevel_count - 1; i++) {
364 CompileTrainingData* ctd = _last_toplevel_compiles[i];
365 if (ctd != nullptr) {
366 ctd->prepare(visitor);
367 }
368 }
369 }
370
371 void CompileTrainingData::prepare(Visitor& visitor) {
372 if (visitor.is_visited(this)) {
373 return;
374 }
375 visitor.visit(this);
376 method()->prepare(visitor);
377 _init_deps.prepare();
378 _ci_records.prepare();
379 }
380
// Returns the KlassTrainingData for 'holder': first the archived dictionary
// (pure replay runs, CDS builds only), then the live training set when
// recording, where a missing record is allocated and installed unless
// 'null_if_not_found' is set. Returns null on failure or when nothing applies.
KlassTrainingData* KlassTrainingData::make(InstanceKlass* holder, bool null_if_not_found) {
  Key key(holder);
  // The archived lookup compiles away entirely in non-CDS builds.
  TrainingData* td = CDS_ONLY((have_data() && !need_data()) ? lookup_archived_training_data(&key) :) nullptr;
  KlassTrainingData* ktd = nullptr;
  if (td != nullptr) {
    ktd = td->as_KlassTrainingData();
    guarantee(!ktd->has_holder() || ktd->holder() == holder, "");
    if (ktd->has_holder()) {
      return ktd;
    } else {
      // Archived record lost its holder (cleaned out); ignore it.
      ktd = nullptr;
    }
  }
  if (need_data()) {
    TrainingDataLocker l;
    td = training_data_set()->find(&key);
    if (td == nullptr) {
      if (null_if_not_found) {
        return nullptr;
      }
      ktd = KlassTrainingData::allocate(holder);
      if (ktd == nullptr) {
        return nullptr; // allocation failure
      }
      td = training_data_set()->install(ktd);
      assert(ktd == td, "");
    } else {
      ktd = td->as_KlassTrainingData();
      guarantee(ktd->holder() != nullptr, "null holder");
    }
    assert(ktd != nullptr, "");
    guarantee(ktd->holder() == holder, "");
  }
  return ktd;
}
416
// Prints the class name plus a one-letter init-state tag (nothing for fully
// initialized, "I" suffix for interfaces), or "[SYM]" for a symbolic-only
// record; unless name_only, also lists the dependent compile records.
void KlassTrainingData::print_on(outputStream* st, bool name_only) const {
  if (has_holder()) {
    name()->print_symbol_on(st);
    switch (holder()->init_state()) {
      case InstanceKlass::allocated:            st->print("[A]"); break;
      case InstanceKlass::loaded:               st->print("[D]"); break;
      case InstanceKlass::linked:               st->print("[L]"); break;
      case InstanceKlass::being_initialized:    st->print("[i]"); break;
      case InstanceKlass::fully_initialized:                      break;
      case InstanceKlass::initialization_error: st->print("[E]"); break;
      default: fatal("unknown state: %d", holder()->init_state());
    }
    if (holder()->is_interface()) {
      st->print("I");
    }
  } else {
    st->print("[SYM]");
  }
  if (name_only) {
    return;
  }
  if (_comp_deps.length() > 0) {
    for (int i = 0, len = _comp_deps.length(); i < len; i++) {
      st->print(" dep:");
      _comp_deps.at(i)->print_on(st, true);
    }
  }
}
445
// Builds a record for a live class. A global OopHandle to the class mirror is
// created to keep the class alive; it is deliberately not stored because it
// is never released.
KlassTrainingData::KlassTrainingData(InstanceKlass* klass) : TrainingData(klass) {
  assert(klass != nullptr, "");
  // The OopHandle constructor will allocate a handle. We don't need to ever release it so we don't preserve
  // the handle object.
  OopHandle handle(Universe::vm_global(), klass->java_mirror());
  _holder = klass;
  assert(holder() == klass, "");
}
454
// Called once this class reaches the fully-initialized state: tells every
// dependent compile record to release one pending init dependency, then
// marks the holder so the deps are not processed again.
void KlassTrainingData::notice_fully_initialized() {
  ResourceMark rm;
  assert(has_holder(), "");
  assert(holder()->is_initialized(), "wrong state: %s %s",
         holder()->name()->as_C_string(), holder()->init_state_name());

  TrainingDataLocker l; // Not a real lock if we don't collect the data,
                        // that's why we need the atomic decrement below.
  for (int i = 0; i < comp_dep_count(); i++) {
    comp_dep(i)->dec_init_deps_left_release(this);
  }
  holder()->set_has_init_deps_processed();
}
468
// Builds _dumptime_training_data_dictionary, the flat list of records that
// will be written to the archive. When assembling from a previous archive
// (and not recording), the archived records are carried over; when recording,
// the live set is snapshotted (further additions blocked) and each record is
// prepared. CompileTrainingData records are reachable via their owners and
// are not appended directly.
void TrainingData::init_dumptime_table(TRAPS) {
  if (assembling_data() && !need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    _archived_training_data_dictionary.iterate_all([&](TrainingData* record) {
      _dumptime_training_data_dictionary->append(record);
    });
  }
  if (need_data()) {
    _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
    TrainingDataLocker l;
    TrainingDataLocker::snapshot();
    ResourceMark rm;
    Visitor visitor(training_data_set()->size());
    training_data_set()->iterate([&](TrainingData* td) {
      td->prepare(visitor);
      if (!td->is_CompileTrainingData()) {
        _dumptime_training_data_dictionary->append(td);
      }
    });
  }

  RecompilationSchedule::prepare(CHECK);

  if (AOTVerifyTrainingData) {
    TrainingData::verify();
  }
}
496
497 void TrainingData::iterate_roots(MetaspaceClosure* it) {
498 if (_dumptime_training_data_dictionary != nullptr) {
499 for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
500 _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
501 }
502 }
503 RecompilationSchedule::iterate_roots(it);
504 }
505
// Writes the dump-time records into the compact hashtable that becomes the
// archived training data dictionary. Each record's key is hashed and the
// record's buffered (archive) address is stored as an offset. In debug
// builds, also checks that no two records share a key.
void TrainingData::dump_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    CompactHashtableStats stats;
    _archived_training_data_dictionary_for_dumping.reset();
    CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
#ifdef ASSERT
      // Keys must be unique; a duplicate would make archived lookup ambiguous.
      for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
        TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
        assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
      }
#endif // ASSERT
      // Translate to the record's location inside the archive buffer.
      td = ArchiveBuilder::current()->get_buffered_addr(td);
      uint hash = TrainingData::Key::cds_hash(td->key());
      u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
      writer.add(hash, delta);
    }
    writer.dump(&_archived_training_data_dictionary_for_dumping, "training data dictionary");
  }
}
527
// Runs cleanup() on every dump-time record (which may empty the keys of
// records whose holders are excluded from the archive), then compacts the
// list in place, dropping all records with emptied keys.
void TrainingData::cleanup_training_data() {
  if (_dumptime_training_data_dictionary != nullptr) {
    ResourceMark rm;
    Visitor visitor(_dumptime_training_data_dictionary->length());
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      td->cleanup(visitor);
    }
    // Throw away all elements with empty keys
    int j = 0; // next write position in the compacted list
    for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
      TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
      if (td->key()->is_empty()) {
        continue;
      }
      if (i != j) { // no need to copy if it's the same
        _dumptime_training_data_dictionary->at_put(j, td);
      }
      j++;
    }
    _dumptime_training_data_dictionary->trunc_to(j);
  }
  RecompilationSchedule::cleanup();
}
552
// Dump-time cleanup: if the holder is unloaded (or excluded from the AOT
// archive, checked only at the AOT safepoint), detach it and empty the key
// so cleanup_training_data() drops this record; then clean dependents.
void KlassTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    bool is_excluded = !holder()->is_loaded();
    if (CDSConfig::is_at_aot_safepoint()) {
      // Check for AOT exclusion only at AOT safe point.
      is_excluded |= SystemDictionaryShared::should_be_excluded(holder());
    }
    if (is_excluded) {
      ResourceMark rm;
      log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < _comp_deps.length(); i++) {
    _comp_deps.at(i)->cleanup(visitor);
  }
}
575
// Dump-time cleanup: if the holder's class is excluded from the AOT archive
// (checked only at the AOT safepoint), drop the profile/counters/holder and
// empty the key so this record is removed; then clean dependent compiles.
void MethodTrainingData::cleanup(Visitor& visitor) {
  if (visitor.is_visited(this)) {
    return;
  }
  visitor.visit(this);
  if (has_holder()) {
    if (CDSConfig::is_at_aot_safepoint() && SystemDictionaryShared::should_be_excluded(holder()->method_holder())) {
      // Check for AOT exclusion only at AOT safe point.
      log_debug(aot, training)("Cleanup MTD %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      if (_final_profile != nullptr && _final_profile->method() != _holder) {
        // Profile no longer belongs to this method; report before dropping.
        log_warning(aot, training)("Stale MDO for  %s::%s", name()->as_klass_external_name(), signature()->as_utf8());
      }
      _final_profile = nullptr;
      _final_counters = nullptr;
      _holder = nullptr;
      key()->make_empty();
    }
  }
  for (int i = 0; i < CompLevel_count - 1; i++) {
    CompileTrainingData* ctd = _last_toplevel_compiles[i];
    if (ctd != nullptr) {
      ctd->cleanup(visitor);
    }
  }
}
601
// Checks the back-link invariant: every compile record listed as depending
// on this class must list this class among its init deps. Both records are
// printed before failing to aid diagnosis.
void KlassTrainingData::verify() {
  for (int i = 0; i < comp_dep_count(); i++) {
    CompileTrainingData* ctd = comp_dep(i);
    if (!ctd->_init_deps.contains(this)) {
      print_on(tty); tty->cr();
      ctd->print_on(tty); tty->cr();
    }
    guarantee(ctd->_init_deps.contains(this), "");
  }
}
612
// Verifies every compile record attached to this method; the flag controls
// whether the pending-init-deps counter invariant is checked as well.
void MethodTrainingData::verify(bool verify_dep_counter) {
  iterate_compiles([&](CompileTrainingData* ctd) {
    ctd->verify(verify_dep_counter);
  });
}
618
// Checks this compile record's invariants: each init dep must link back to
// this record via its comp deps, and (optionally) the cached pending-deps
// counter must be at least the freshly recomputed value — it may be larger
// because concurrent decrements can land between the two reads.
void CompileTrainingData::verify(bool verify_dep_counter) {
  for (int i = 0; i < init_dep_count(); i++) {
    KlassTrainingData* ktd = init_dep(i);
    if (ktd->has_holder() && ktd->holder()->defined_by_other_loaders()) {
      // Informational only: deps on classes from custom loaders are legal
      // but worth logging.
      LogStreamHandle(Info, training) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        log.print("CTD "); print_value_on(&log);
        log.print(" depends on unregistered class %s", ktd->holder()->name()->as_C_string());
      }
    }
    if (!ktd->_comp_deps.contains(this)) {
      print_on(tty); tty->cr();
      ktd->print_on(tty); tty->cr();
    }
    guarantee(ktd->_comp_deps.contains(this), "");
  }

  if (verify_dep_counter) {
    int init_deps_left1 = init_deps_left_acquire();
    int init_deps_left2 = compute_init_deps_left();

    bool invariant = (init_deps_left1 >= init_deps_left2);
    if (!invariant) {
      print_on(tty);
      tty->cr();
    }
    guarantee(invariant, "init deps invariant violation: %d >= %d", init_deps_left1, init_deps_left2);
  }
}
649
650 void CompileTrainingData::cleanup(Visitor& visitor) {
651 if (visitor.is_visited(this)) {
652 return;
653 }
654 visitor.visit(this);
655 method()->cleanup(visitor);
656 }
657
658 void TrainingData::serialize(SerializeClosure* soc) {
659 if (soc->writing()) {
660 _archived_training_data_dictionary_for_dumping.serialize_header(soc);
661 } else {
662 _archived_training_data_dictionary.serialize_header(soc);
663 }
664 RecompilationSchedule::serialize(soc);
665 }
666
667 class TrainingDataPrinter : StackObj {
668 outputStream* _st;
669 int _index;
670 public:
671 TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
672 void do_value(TrainingData* td) {
673 const char* type = (td->is_KlassTrainingData() ? "K" :
674 td->is_MethodTrainingData() ? "M" :
675 td->is_CompileTrainingData() ? "C" : "?");
676 _st->print("%4d: %p %s ", _index++, td, type);
677 td->print_on(_st);
678 _st->cr();
679 if (td->is_KlassTrainingData()) {
680 td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
681 ResourceMark rm;
682 _st->print_raw(" C ");
683 ctd->print_on(_st);
684 _st->cr();
685 });
686 } else if (td->is_MethodTrainingData()) {
687 td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
688 ResourceMark rm;
689 _st->print_raw(" C ");
690 ctd->print_on(_st);
691 _st->cr();
692 });
693 } else if (td->is_CompileTrainingData()) {
694 // ?
695 }
696 }
697 };
698
// Dumps the whole archived dictionary (and the recompilation schedule) to
// 'st'. The locker is initialized here because this can run in modes where
// TrainingData::initialize() did not set it up.
void TrainingData::print_archived_training_data_on(outputStream* st) {
  st->print_cr("Archived TrainingData Dictionary");
  TrainingDataPrinter tdp(st);
  TrainingDataLocker::initialize();
  _archived_training_data_dictionary.iterate_all(&tdp);
  RecompilationSchedule::print_archived_training_data_on(st);
}
706
// Reports the key's metadata slot for relocation; const_cast is needed
// because _meta is declared const but must be patchable by the archiver.
void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure *iter) {
  iter->push(const_cast<Metadata**>(&_meta));
}
710
// Base-class contribution: only the key holds a metaspace pointer here.
void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  _key.metaspace_pointers_do(iter);
}
714
// A key is CDS-hashable only if its metadata is null or lives in the AOT
// cache, where its address is stable across processes.
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
  return k->meta() == nullptr || MetaspaceObj::in_aot_cache(k->meta());
}
718
// Hashes the key's metadata address for the archived compact hashtable;
// only valid when can_compute_cds_hash() holds.
uint TrainingData::Key::cds_hash(const Key* const& k) {
  return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
}
722
// Looks up 'k' in the archived dictionary. Returns null when the key cannot
// be CDS-hashed or the dictionary is empty. Any hit must be a klass or
// method record that still has its holder; anything else indicates a
// corrupt archive.
TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
  assert(!need_data(), "Should be used only in read-only mode");
  // For this to work, all components of the key must be in shared metaspace.
  if (!TrainingData::Key::can_compute_cds_hash(k) || _archived_training_data_dictionary.empty()) {
    return nullptr;
  }
  uint hash = TrainingData::Key::cds_hash(k);
  TrainingData* td = _archived_training_data_dictionary.lookup(k, hash, -1 /*unused*/);
  if (td != nullptr) {
    if ((td->is_KlassTrainingData() && td->as_KlassTrainingData()->has_holder()) ||
        (td->is_MethodTrainingData() && td->as_MethodTrainingData()->has_holder())) {
      return td;
    } else {
      ShouldNotReachHere();
    }
  }
  return nullptr;
}
741
// Reports the flattened dependency array for relocation by the archiver.
template <typename T>
void TrainingData::DepList<T>::metaspace_pointers_do(MetaspaceClosure* iter) {
  iter->push(&_deps);
}
746
// Reports this record's metaspace pointers: base key, comp-dep list, and
// the holder class.
void KlassTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(KlassTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _comp_deps.metaspace_pointers_do(iter);
  iter->push(&_holder);
}
753
// Reports this record's metaspace pointers: base key, owning klass record,
// holder Method, last top-level compiles per level, and final profile and
// counters snapshots.
void MethodTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(MethodTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  iter->push(&_klass);
  iter->push((Method**)&_holder);
  for (int i = 0; i < CompLevel_count - 1; i++) {
    iter->push(&_last_toplevel_compiles[i]);
  }
  iter->push(&_final_profile);
  iter->push(&_final_counters);
}
765
// Reports this record's metaspace pointers: base key, init-dep and ci-record
// lists, and the owning method record.
void CompileTrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
  log_trace(aot, training)("Iter(CompileTrainingData): %p", this);
  TrainingData::metaspace_pointers_do(iter);
  _init_deps.metaspace_pointers_do(iter);
  _ci_records.metaspace_pointers_do(iter);
  iter->push(&_method);
}
773
// Flattens the dynamic (C-heap GrowableArray) dependency list into a
// fixed-size Array<T> suitable for archiving. No-op if already flattened
// or never populated.
template <typename T>
void TrainingData::DepList<T>::prepare() {
  if (_deps == nullptr && _deps_dyn != nullptr) {
    int len = _deps_dyn->length();
    _deps = MetadataFactory::new_array_from_c_heap<T>(len, mtClassShared);
    for (int i = 0; i < len; i++) {
      _deps->at_put(i, _deps_dyn->at(i)); // copy
    }
    // The dynamic list is abandoned once its contents are copied.
    _deps_dyn = nullptr;
  }
}
785
// Strips state that cannot be archived: base-class state plus the comp-dep
// list's unshareable parts.
void KlassTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _comp_deps.remove_unshareable_info();
}
790
// Strips state that cannot be archived, including from the snapshotted
// counters and profile when present.
void MethodTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  if (_final_counters != nullptr) {
    _final_counters->remove_unshareable_info();
  }
  if (_final_profile != nullptr) {
    _final_profile->remove_unshareable_info();
  }
}
800
// Strips unarchivable state from the dep lists, then re-seeds the pending
// counter counting ALL deps with holders (count_initialized=true), since
// classes must re-initialize in the next JVM run.
void CompileTrainingData::remove_unshareable_info() {
  TrainingData::remove_unshareable_info();
  _init_deps.remove_unshareable_info();
  _ci_records.remove_unshareable_info();
  _init_deps_left = compute_init_deps_left(true);
}