1 /*
2 * Copyright (c) 2025, 2026, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_OOPS_TRAININGDATA_HPP
26 #define SHARE_OOPS_TRAININGDATA_HPP
27
28 #include "cds/cdsConfig.hpp"
29 #include "classfile/compactHashtable.hpp"
30 #include "compiler/compiler_globals.hpp"
31 #include "compiler/compilerDefinitions.hpp"
32 #include "memory/allocation.hpp"
33 #include "memory/metaspaceClosure.hpp"
34 #include "oops/instanceKlass.hpp"
35 #include "oops/method.hpp"
36 #include "oops/objArrayKlass.hpp"
37 #include "runtime/handles.hpp"
38 #include "runtime/mutexLocker.hpp"
39 #include "utilities/count_leading_zeros.hpp"
40 #include "utilities/resizableHashTable.hpp"
41
42 class ciEnv;
43 class ciBaseObject;
44 class CompileTask;
45 class CompileTrainingData;
46 class KlassTrainingData;
47 class MethodTrainingData;
48
49 // Base class for all the training data varieties
50 class TrainingData : public Metadata {
51 friend KlassTrainingData;
52 friend MethodTrainingData;
53 friend CompileTrainingData;
54 public:
55 // Key is used to insert any TrainingData (TD) object into a hash tables. The key is currently a
56 // pointer to a metaspace object the TD is associated with. For example,
57 // for KlassTrainingData it's an InstanceKlass, for MethodTrainingData it's a Method.
58 // The utility of the these hash tables is to be able to find a TD object for a given metaspace
59 // metaspace object.
60 class Key {
61 mutable Metadata* _meta;
62 // These guys can get to my constructors:
63 friend TrainingData;
64 friend KlassTrainingData;
65 friend MethodTrainingData;
66 friend CompileTrainingData;
67
68 // The empty key
69 Key() : _meta(nullptr) { }
70 bool is_empty() const { return _meta == nullptr; }
71 public:
72 Key(Metadata* meta) : _meta(meta) { }
73
74 static bool can_compute_cds_hash(const Key* const& k);
75 static uint cds_hash(const Key* const& k);
76 static unsigned hash(const Key* const& k) {
77 return primitive_hash(k->meta());
78 }
79 static bool equals(const Key* const& k1, const Key* const& k2) {
80 return k1->meta() == k2->meta();
81 }
82 static inline bool equals(TrainingData* value, const TrainingData::Key* key, int unused) {
83 return equals(value->key(), key);
84 }
85 int cmp(const Key* that) const {
86 auto m1 = this->meta();
87 auto m2 = that->meta();
88 if (m1 < m2) return -1;
89 if (m1 > m2) return +1;
90 return 0;
91 }
92 Metadata* meta() const { return _meta; }
93 void metaspace_pointers_do(MetaspaceClosure *iter);
94 void make_empty() const { _meta = nullptr; }
95 };
96
97 // TrainingDataLocker is used to guard read/write operations on non-MT-safe data structures.
98 // It supports recursive locking and a read-only mode (in which case no locks are taken).
99 // It is also a part of the TD collection termination protocol (see the "snapshot" field).
100 class TrainingDataLocker {
101 #if INCLUDE_CDS
102 static volatile bool _snapshot; // If true we're not allocating new training data
103 #endif
104 static int _lock_mode;
105 const bool _recursive;
106 static void lock() {
107 #if INCLUDE_CDS
108 assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
109 if (_lock_mode > 0) {
110 TrainingData_lock->lock_without_safepoint_check();
111 }
112 #endif
113 }
114 static void unlock() {
115 #if INCLUDE_CDS
116 if (_lock_mode > 0) {
117 TrainingData_lock->unlock();
118 }
119 #endif
120 }
121 static bool safely_locked() {
122 #if INCLUDE_CDS
123 assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
124 if (_lock_mode > 0) {
125 return is_self_locked();
126 } else {
127 return true;
128 }
129 #else
130 return true;
131 #endif
132 }
133 static bool is_self_locked() {
134 return CDS_ONLY(TrainingData_lock->owned_by_self()) NOT_CDS(false);
135 }
136
137 public:
138 static void snapshot() {
139 #if INCLUDE_CDS
140 assert_locked();
141 _snapshot = true;
142 #endif
143 }
144 static bool can_add() {
145 #if INCLUDE_CDS
146 assert_locked();
147 return !_snapshot;
148 #else
149 return false;
150 #endif
151 }
152 static void initialize() {
153 #if INCLUDE_CDS
154 _lock_mode = need_data() ? +1 : -1; // if -1, we go lock-free
155 #endif
156 }
157 static void assert_locked_or_snapshotted() {
158 #if INCLUDE_CDS
159 assert(safely_locked() || _snapshot, "use under TrainingDataLocker or after snapshot");
160 #endif
161 }
162 static void assert_locked() {
163 assert(safely_locked(), "use under TrainingDataLocker");
164 }
165 static void assert_can_add() {
166 assert(can_add(), "Cannot add TrainingData objects");
167 }
168 TrainingDataLocker() : _recursive(is_self_locked()) {
169 if (!_recursive) {
170 lock();
171 }
172 }
173 ~TrainingDataLocker() {
174 if (!_recursive) {
175 unlock();
176 }
177 }
178 };
179
180 // A set of TD objects that we collect during the training run.
181 class TrainingDataSet {
182 friend TrainingData;
183 ResizeableHashTable<const Key*, TrainingData*,
184 AnyObj::C_HEAP, MemTag::mtCompiler,
185 &TrainingData::Key::hash,
186 &TrainingData::Key::equals>
187 _table;
188
189 public:
190 template<typename... Arg>
191 TrainingDataSet(Arg... arg)
192 : _table(arg...) {
193 }
194 TrainingData* find(const Key* key) const {
195 TrainingDataLocker::assert_locked();
196 if (TrainingDataLocker::can_add()) {
197 auto res = _table.get(key);
198 return res == nullptr ? nullptr : *res;
199 }
200 return nullptr;
201 }
202 bool remove(const Key* key) {
203 return _table.remove(key);
204 }
205 TrainingData* install(TrainingData* td) {
206 TrainingDataLocker::assert_locked();
207 TrainingDataLocker::assert_can_add();
208 auto key = td->key();
209 if (key->is_empty()) {
210 return td; // unkeyed TD not installed
211 }
212 bool created = false;
213 auto prior = _table.put_if_absent(key, td, &created);
214 if (prior == nullptr || *prior == td) {
215 return td;
216 }
217 assert(false, "no pre-existing elements allowed");
218 return *prior;
219 }
220 template<typename Function>
221 void iterate(Function fn) const { // lambda enabled API
222 return _table.iterate_all([&](const TrainingData::Key* k, TrainingData* td) { fn(td); });
223 }
224 int size() const { return _table.number_of_entries(); }
225
226 void verify() const {
227 TrainingDataLocker::assert_locked();
228 iterate([&](TrainingData* td) { td->verify(); });
229 }
230 };
231
232 // A widget to ensure that we visit TD object only once (TD objects can have pointer to
233 // other TD object that are sometimes circular).
234 class Visitor {
235 ResizeableHashTable<TrainingData*, bool> _visited;
236 public:
237 Visitor(unsigned size) : _visited(size, 0x3fffffff) { }
238 bool is_visited(TrainingData* td) {
239 return _visited.contains(td);
240 }
241 void visit(TrainingData* td) {
242 bool created;
243 _visited.put_if_absent(td, &created);
244 }
245 };
246
247 typedef OffsetCompactHashtable<const TrainingData::Key*, TrainingData*, TrainingData::Key::equals> TrainingDataDictionary;
248 private:
249 Key _key;
250
251 // just forward all constructor arguments to the embedded key
252 template<typename... Arg>
253 TrainingData(Arg... arg)
254 : _key(arg...) { }
255
256 // Container for recording TD during training run
257 static TrainingDataSet _training_data_set;
258 // Containter for replaying the training data (read-only, populated from the AOT image)
259 static TrainingDataDictionary _archived_training_data_dictionary;
260 // Container used for writing the AOT image
261 static TrainingDataDictionary _archived_training_data_dictionary_for_dumping;
262 class DumpTimeTrainingDataInfo {
263 TrainingData* _training_data;
264 public:
265 DumpTimeTrainingDataInfo() : DumpTimeTrainingDataInfo(nullptr) {}
266 DumpTimeTrainingDataInfo(TrainingData* training_data) : _training_data(training_data) {}
267 void metaspace_pointers_do(MetaspaceClosure* it) {
268 it->push(&_training_data);
269 }
270 TrainingData* training_data() {
271 return _training_data;
272 }
273 };
274 typedef GrowableArrayCHeap<DumpTimeTrainingDataInfo, mtClassShared> DumptimeTrainingDataDictionary;
275 // A temporary container that is used to accumulate and filter TD during dumping
276 static DumptimeTrainingDataDictionary* _dumptime_training_data_dictionary;
277
278 static TrainingDataSet* training_data_set() { return &_training_data_set; }
279 static TrainingDataDictionary* archived_training_data_dictionary() { return &_archived_training_data_dictionary; }
280
281 public:
282 // Returns the key under which this TD is installed, or else
283 // Key::EMPTY if it is not installed.
284 const Key* key() const { return &_key; }
285
286 static bool have_data() { return AOTReplayTraining; } // Going to read
287 static bool need_data() { return AOTRecordTraining; } // Going to write
288 static bool assembling_data() { return have_data() && CDSConfig::is_dumping_final_static_archive() && CDSConfig::is_dumping_aot_linked_classes(); }
289
290 static bool is_klass_loaded(Klass* k) {
291 if (have_data()) {
292 // If we're running in AOT mode some classes may not be loaded yet
293 if (k->is_objArray_klass()) {
294 k = ObjArrayKlass::cast(k)->bottom_klass();
295 }
296 if (k->is_instance_klass()) {
297 return InstanceKlass::cast(k)->is_loaded();
298 }
299 }
300 return true;
301 }
302
303 template<typename Function>
304 static void iterate(Function fn) { // lambda enabled API
305 TrainingDataLocker l;
306 if (have_data() && !need_data()) {
307 archived_training_data_dictionary()->iterate_all([&](TrainingData* td) { fn(td); });
308 }
309 if (need_data()) {
310 training_data_set()->iterate(fn);
311 }
312 }
313
314 virtual MethodTrainingData* as_MethodTrainingData() const { return nullptr; }
315 virtual KlassTrainingData* as_KlassTrainingData() const { return nullptr; }
316 virtual CompileTrainingData* as_CompileTrainingData() const { return nullptr; }
317 bool is_MethodTrainingData() const { return as_MethodTrainingData() != nullptr; }
318 bool is_KlassTrainingData() const { return as_KlassTrainingData() != nullptr; }
319 bool is_CompileTrainingData() const { return as_CompileTrainingData() != nullptr; }
320
321 virtual void prepare(Visitor& visitor) = 0;
322 virtual void cleanup(Visitor& visitor) = 0;
323
324 static void initialize() NOT_CDS_RETURN;
325
326 static void verify() NOT_CDS_RETURN;
327
328 // Widget for recording dependencies, as an N-to-M graph relation,
329 // possibly cyclic.
330 template<typename E>
331 class DepList : public StackObj {
332 static const int INITIAL_CAPACITY = 10;
333
334 GrowableArrayCHeap<E, mtCompiler>* _deps_dyn;
335 Array<E>* _deps;
336
337 void copy_on_write_if_necessary() {
338 TrainingDataLocker::assert_locked_or_snapshotted();
339 if (_deps != nullptr && _deps_dyn == nullptr) {
340 _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(length() + INITIAL_CAPACITY);
341 for (int i = 0; _deps->length(); i++) {
342 _deps_dyn->append(_deps->at(i));
343 }
344 _deps = nullptr;
345 }
346 }
347 public:
348 DepList() {
349 _deps_dyn = nullptr;
350 _deps = nullptr;
351 }
352
353 int length() const {
354 TrainingDataLocker::assert_locked_or_snapshotted();
355 return (_deps_dyn != nullptr ? _deps_dyn->length()
356 : _deps != nullptr ? _deps->length()
357 : 0);
358 }
359 E at(int i) const {
360 TrainingDataLocker::assert_locked_or_snapshotted();
361 assert(i >= 0 && i < length(), "oob");
362 if (_deps_dyn != nullptr) {
363 return _deps_dyn->at(i);
364 } else if (_deps != nullptr) {
365 return _deps->at(i);
366 } else ShouldNotReachHere();
367 }
368 bool append_if_missing(E dep) {
369 TrainingDataLocker::assert_can_add();
370 copy_on_write_if_necessary();
371 if (_deps_dyn == nullptr) {
372 _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(INITIAL_CAPACITY);
373 _deps_dyn->append(dep);
374 return true;
375 } else {
376 return _deps_dyn->append_if_missing(dep);
377 }
378 }
379 bool remove_if_existing(E dep) {
380 TrainingDataLocker::assert_can_add();
381 copy_on_write_if_necessary();
382 if (_deps_dyn != nullptr) {
383 return _deps_dyn->remove_if_existing(dep);
384 }
385 return false;
386 }
387 void clear() {
388 TrainingDataLocker::assert_can_add();
389 if (_deps_dyn != nullptr) {
390 _deps_dyn->clear();
391 }
392 _deps = nullptr;
393 }
394 void append(E dep) {
395 TrainingDataLocker::assert_can_add();
396 copy_on_write_if_necessary();
397 if (_deps_dyn == nullptr) {
398 _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(INITIAL_CAPACITY);
399 }
400 _deps_dyn->append(dep);
401 }
402 bool contains(E dep) {
403 TrainingDataLocker::assert_locked();
404 for (int i = 0; i < length(); i++) {
405 if (dep == at(i)) {
406 return true; // found
407 }
408 }
409 return false; // not found
410 }
411
412 #if INCLUDE_CDS
413 void remove_unshareable_info() {
414 _deps_dyn = nullptr;
415 }
416 #endif
417 void prepare();
418 void metaspace_pointers_do(MetaspaceClosure *iter);
419 };
420
421 virtual void metaspace_pointers_do(MetaspaceClosure *iter);
422
423 static void init_dumptime_table(TRAPS);
424
425 #if INCLUDE_CDS
426 virtual void remove_unshareable_info() {}
427 static void iterate_roots(MetaspaceClosure* it);
428 static void dump_training_data();
429 static void cleanup_training_data();
430 static void serialize(SerializeClosure* soc);
431 static void print_archived_training_data_on(outputStream* st);
432 static TrainingData* lookup_archived_training_data(const Key* k);
433 #endif
434
435 template<typename TrainingDataType, typename... ArgTypes>
436 static TrainingDataType* allocate(ArgTypes... args) {
437 assert(need_data() || have_data(), "");
438 if (TrainingDataLocker::can_add()) {
439 return new (mtClassShared) TrainingDataType(args...);
440 }
441 return nullptr;
442 }
443 };
444
445 // Training data that is associated with an InstanceKlass
// Training data that is associated with an InstanceKlass
class KlassTrainingData : public TrainingData {
  friend TrainingData;
  friend CompileTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  // cross-link to live klass, or null if not loaded or encountered yet
  InstanceKlass* _holder;

  DepList<CompileTrainingData*> _comp_deps; // compiles that depend on me

  KlassTrainingData();
  KlassTrainingData(InstanceKlass* klass);

  // Number of compilations that recorded a dependency on this class.
  int comp_dep_count() const {
    TrainingDataLocker::assert_locked();
    return _comp_deps.length();
  }
  CompileTrainingData* comp_dep(int i) const {
    TrainingDataLocker::assert_locked();
    return _comp_deps.at(i);
  }
  // Links a compile as dependent on this class; duplicates are ignored.
  void add_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
     _comp_deps.append_if_missing(ctd);
  }
  void remove_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
     _comp_deps.remove_if_existing(ctd);
  }
public:
  Symbol* name() const {
    precond(has_holder());
    return holder()->name();
  }
  bool has_holder()       const { return _holder != nullptr; }
  InstanceKlass* holder() const { return _holder;  }

  // Finds the TD for the given class, creating it if absent (unless
  // null_if_not_found is true, in which case a miss returns null).
  static KlassTrainingData* make(InstanceKlass* holder,
                                 bool null_if_not_found = false) NOT_CDS_RETURN_(nullptr);
  // Lookup-only variant of make(): never creates a new TD.
  static KlassTrainingData* find(InstanceKlass* holder) {
    return make(holder, true);
  }
  virtual KlassTrainingData* as_KlassTrainingData() const { return const_cast<KlassTrainingData*>(this); };

  // Called when the holder class completes initialization (see .cpp).
  void notice_fully_initialized() NOT_CDS_RETURN;

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  MetaspaceObj::Type type() const {
    return KlassTrainingDataType;
  }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  void metaspace_pointers_do(MetaspaceClosure *iter);

  // Size in metadata words; used by the metaspace/CDS allocator.
  int size() const {
    return (int)align_metadata_size(align_up(sizeof(KlassTrainingData), BytesPerWord)/BytesPerWord);
  }

  const char* internal_name() const {
    return "{ klass training data }";
  };

  void verify();

  static KlassTrainingData* allocate(InstanceKlass* holder) {
    return TrainingData::allocate<KlassTrainingData>(holder);
  }

  // Visits every dependent compile under the TrainingData lock.
  template<typename Function>
  void iterate_comp_deps(Function fn) const { // lambda enabled API
    TrainingDataLocker l;
    for (int i = 0; i < comp_dep_count(); i++) {
      fn(comp_dep(i));
    }
  }
};
535
536 // Information about particular JIT tasks.
// Information about particular JIT tasks.
class CompileTrainingData : public TrainingData {
  friend TrainingData;
  friend KlassTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  MethodTrainingData* _method;  // the method this compile task belongs to
  const short _level;           // compilation level of the recorded compile
  const int _compile_id;        // id of the recorded compile task

  // Size of nmethod code during training
  int _inline_instructions_size;

  // classes that should be initialized before this JIT task runs
  DepList<KlassTrainingData*> _init_deps;
  // Number of uninitialized classes left, when it's 0, all deps are satisfied
  volatile int _init_deps_left;

public:
  // ciRecords is a generic mechanism to memoize CI responses to arbitrary queries. For each function we're interested in we record
  // (return_value, argument_values) tuples in a list. Arguments are allowed to have Metaspace pointers in them.
  class ciRecords {
    // Recursive tuple of argument values; the empty specialization terminates the recursion.
    template <typename... Ts> class Arguments {
    public:
      bool operator==(const Arguments<>&) const { return true; }
      void metaspace_pointers_do(MetaspaceClosure *iter) { }
    };
    template <typename T, typename... Ts> class Arguments<T, Ts...> {
    private:
      T _first;                   // head of the tuple
      Arguments<Ts...> _remaining; // tail of the tuple
    public:
      constexpr Arguments(const T& first, const Ts&... remaining) noexcept
        : _first(first), _remaining(remaining...) {}
      constexpr Arguments() noexcept : _first(), _remaining() {}
      bool operator==(const Arguments<T, Ts...>& that) const {
        return _first == that._first && _remaining == that._remaining;
      }
      // Overload chosen when _first is a pointer to a MetaspaceObj: it must be
      // pushed so the CDS relocator can patch it.
      template<typename U = T, ENABLE_IF(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value)>
      void metaspace_pointers_do(MetaspaceClosure *iter) {
        iter->push(&_first);
        _remaining.metaspace_pointers_do(iter);
      }
      // Overload for non-metaspace arguments: skip _first, recurse on the tail.
      template<typename U = T, ENABLE_IF(!(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value))>
      void metaspace_pointers_do(MetaspaceClosure *iter) {
        _remaining.metaspace_pointers_do(iter);
      }
    };

    // Memo table for one CI query: a list of (result, arguments) records.
    template <typename ReturnType, typename... Args> class ciMemoizedFunction : public StackObj {
    public:
      // Result wrapper: is_valid() distinguishes a recorded hit from a miss.
      class OptionalReturnType {
        bool _valid;
        ReturnType _result;
      public:
        OptionalReturnType(bool valid, const ReturnType& result) : _valid(valid), _result(result) {}
        bool is_valid() const { return _valid; }
        ReturnType result() const { return _result; }
      };
    private:
      typedef Arguments<Args...> ArgumentsType;
      class Record : public MetaspaceObj {
        ReturnType _result;
        ArgumentsType _arguments;
      public:
        Record(const ReturnType& result, const ArgumentsType& arguments) : _result(result), _arguments(arguments) {}
        Record() { }
        ReturnType result() const { return _result; }
        ArgumentsType arguments() const { return _arguments; }
        // Records compare by arguments only — one memoized result per argument tuple.
        bool operator==(const Record& that) { return _arguments == that._arguments; }
        void metaspace_pointers_do(MetaspaceClosure *iter) { _arguments.metaspace_pointers_do(iter); }
      };
      DepList<Record> _data;
    public:
      // Linear scan for a record matching the argument tuple; O(n) but lists are small.
      OptionalReturnType find(const Args&... args) {
        TrainingDataLocker l;
        ArgumentsType a(args...);
        for (int i = 0; i < _data.length(); i++) {
          if (_data.at(i).arguments() == a) {
            return OptionalReturnType(true, _data.at(i).result());
          }
        }
        return OptionalReturnType(false, ReturnType());
      }
      void append_if_missing(const ReturnType& result, const Args&... args) {
        TrainingDataLocker l;
        if (l.can_add()) {
          _data.append_if_missing(Record(result, ArgumentsType(args...)));
        }
      }
#if INCLUDE_CDS
      void remove_unshareable_info() { _data.remove_unshareable_info(); }
#endif
      void prepare() {
        _data.prepare();
      }
      void metaspace_pointers_do(MetaspaceClosure *iter) {
        _data.metaspace_pointers_do(iter);
      }
    };


public:
    // Record CI answers for the InlineSmallCode heuristic. It is important since the heuristic is non-commutative and we may want to
    // compile methods in a different order than in the training run.
    typedef ciMemoizedFunction<int, MethodTrainingData*> ciMethod__inline_instructions_size_type;
    ciMethod__inline_instructions_size_type ciMethod__inline_instructions_size;
#if INCLUDE_CDS
    void remove_unshareable_info() {
      ciMethod__inline_instructions_size.remove_unshareable_info();
    }
#endif
    void prepare() {
      ciMethod__inline_instructions_size.prepare();
    }
    void metaspace_pointers_do(MetaspaceClosure *iter) {
      ciMethod__inline_instructions_size.metaspace_pointers_do(iter);
    }
  };

private:
  ciRecords _ci_records;

  CompileTrainingData();
  // Note: constructed with an empty key — CTDs are reached via their method,
  // not looked up in the TD hash tables.
  CompileTrainingData(MethodTrainingData* mtd,
                      int level,
                      int compile_id)
      : TrainingData(),  // empty key
        _method(mtd), _level(level), _compile_id(compile_id), _inline_instructions_size(0), _init_deps_left(0) { }
public:
  ciRecords& ci_records() { return _ci_records; }
  static CompileTrainingData* make(CompileTask* task) NOT_CDS_RETURN_(nullptr);

  virtual CompileTrainingData* as_CompileTrainingData() const { return const_cast<CompileTrainingData*>(this); };

  MethodTrainingData* method() const { return _method; }

  int level() const { return _level; }

  int compile_id() const { return _compile_id; }

  int inline_instructions_size() const { return _inline_instructions_size; }
  void set_inline_instructions_size(int size) { _inline_instructions_size = size; }

  int init_dep_count() const {
    TrainingDataLocker::assert_locked();
    return _init_deps.length();
  }
  KlassTrainingData* init_dep(int i) const {
    TrainingDataLocker::assert_locked();
    return _init_deps.at(i);
  }
  // Records an initialization dependency on ktd and cross-links the
  // reverse edge (ktd -> this) so both sides can be walked.
  void add_init_dep(KlassTrainingData* ktd) {
    TrainingDataLocker::assert_locked();
    ktd->add_comp_dep(this);
    _init_deps.append_if_missing(ktd);
  }
  // Unlinks all initialization dependencies, including the reverse edges.
  void clear_init_deps() {
    TrainingDataLocker::assert_locked();
    for (int i = 0; i < _init_deps.length(); i++) {
      _init_deps.at(i)->remove_comp_dep(this);
    }
    _init_deps.clear();
  }
  void dec_init_deps_left_release(KlassTrainingData* ktd);
  // Reads the remaining-deps counter with acquire semantics, pairing with
  // the releasing decrement above.
  int init_deps_left_acquire() const {
    return AtomicAccess::load_acquire(&_init_deps_left);
  }
  uint compute_init_deps_left(bool count_initialized = false);

  void notice_inlined_method(CompileTask* task, const methodHandle& method) NOT_CDS_RETURN;

  // The JIT looks at classes and objects too and can depend on their state.
  // These simple calls just report the *possibility* of an observation.
  void notice_jit_observation(ciEnv* env, ciBaseObject* what) NOT_CDS_RETURN;

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return CompileTrainingDataType; }

  virtual const char* internal_name() const {
    return "{ compile training data }";
  };

  // Size in metadata words; used by the metaspace/CDS allocator.
  virtual int size() const {
    return (int)align_metadata_size(align_up(sizeof(CompileTrainingData), BytesPerWord)/BytesPerWord);
  }

  void verify(bool verify_dep_counter);

  static CompileTrainingData* allocate(MethodTrainingData* mtd, int level, int compile_id) {
    return TrainingData::allocate<CompileTrainingData>(mtd, level, compile_id);
  }
};
745
746 // Record information about a method at the time compilation is requested.
// Record information about a method at the time compilation is requested.
class MethodTrainingData : public TrainingData {
  friend TrainingData;
  friend CompileTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  KlassTrainingData* _klass;   // TD of the declaring class
  Method* _holder;             // live method, or null if not loaded yet
  // Most recent top-level compile per level (index = level - 1; CompLevel_none excluded).
  CompileTrainingData* _last_toplevel_compiles[CompLevel_count - 1];
  int _highest_top_level;      // highest level at which a top-level compile occurred
  int _level_mask;             // bit-set of all possible levels
  bool _was_toplevel;          // false if the method was only ever inlined
  // metadata snapshots of final state:
  MethodCounters* _final_counters;
  MethodData*     _final_profile;

  int _invocation_count;       // invocation counter snapshot from the training run
  int _backedge_count;         // backedge counter snapshot from the training run

  MethodTrainingData();
  MethodTrainingData(Method* method, KlassTrainingData* ktd) : TrainingData(method) {
    _klass = ktd;
    _holder = method;
    for (int i = 0; i < CompLevel_count - 1; i++) {
      _last_toplevel_compiles[i] = nullptr;
    }
    _highest_top_level = CompLevel_none;
    _level_mask = 0;
    _was_toplevel = false;
    _invocation_count = 0;
    _backedge_count = 0;
  }

  // Single-bit mask for a level; 0 if level is out of the 4-bit range.
  static int level_mask(int level) {
    return ((level & 0xF) != level ? 0 : 1 << level);
  }
  // Index of the highest set bit in mask, i.e. the highest level seen.
  static CompLevel highest_level(int mask) {
    if (mask == 0)  return (CompLevel) 0;
    int diff = (count_leading_zeros(level_mask(0)) - count_leading_zeros(mask));
    return (CompLevel) diff;
  }

public:
  KlassTrainingData* klass()  const { return _klass; }
  bool has_holder()           const { return _holder != nullptr; }
  Method* holder()            const { return _holder; }
  bool only_inlined()         const { return !_was_toplevel; }
  bool saw_level(CompLevel l) const { return (_level_mask & level_mask(l)) != 0; }
  int highest_level()         const { return highest_level(_level_mask); }
  int highest_top_level()     const { return _highest_top_level; }
  MethodData* final_profile() const { return _final_profile; }
  int invocation_count()      const { return _invocation_count; }
  int backedge_count()        const { return _backedge_count; }

  Symbol* name() const {
    precond(has_holder());
    return holder()->name();
  }
  Symbol* signature() const {
    precond(has_holder());
    return holder()->signature();
  }

  // Most recent top-level compile at the given level, or null.
  CompileTrainingData* last_toplevel_compile(int level) const {
    if (level > CompLevel_none) {
      return _last_toplevel_compiles[level - 1];
    }
    return nullptr;
  }

  CompileTrainingData* compile_data_for_aot_code(int level) const {
    CompileTrainingData* ctd = last_toplevel_compile(level);
    if (ctd == nullptr && level == CompLevel_limited_profile) {
      // We compile CompLevel_limited_profile AOT code for CompLevel_full_profile
      ctd = _last_toplevel_compiles[CompLevel_full_profile - 1];
    }
    return ctd;
  }

  // Records that the method was compiled at the given level (possibly as an inlinee).
  void notice_compilation(int level, bool inlined = false) {
    if (!inlined) {
      _was_toplevel = true;
    }
    _level_mask |= level_mask(level);
  }

  void notice_toplevel_compilation(int level) {
    _highest_top_level = MAX2(_highest_top_level, level);
  }

  // Finds the TD for the given method, creating it if absent (unless
  // null_if_not_found is true, in which case a miss returns null).
  static MethodTrainingData* make(const methodHandle& method,
                                  bool null_if_not_found = false,
                                  bool use_cache = true) NOT_CDS_RETURN_(nullptr);
  static MethodTrainingData* find_fast(const methodHandle& method) { return make(method, true, true); }
  static MethodTrainingData* find(const methodHandle& method) { return make(method, true, false); }

  virtual MethodTrainingData* as_MethodTrainingData() const {
    return const_cast<MethodTrainingData*>(this);
  };

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  // Visits the recorded top-level compiles (at most one per level).
  template<typename Function>
  void iterate_compiles(Function fn) const { // lambda enabled API
    for (int i = 0; i < CompLevel_count - 1; i++) {
      CompileTrainingData* ctd = _last_toplevel_compiles[i];
      if (ctd != nullptr) {
        fn(ctd);
      }
    }
  }

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return MethodTrainingDataType; }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  // Size in metadata words; used by the metaspace/CDS allocator.
  virtual int size() const {
    return (int)align_metadata_size(align_up(sizeof(MethodTrainingData), BytesPerWord)/BytesPerWord);
  }

  virtual const char* internal_name() const {
    return "{ method training data }";
  };

  void verify(bool verify_dep_counter);

  static MethodTrainingData* allocate(Method* m, KlassTrainingData* ktd) {
    return TrainingData::allocate<MethodTrainingData>(m, ktd);
  }
};
888 #endif // SHARE_OOPS_TRAININGDATA_HPP