1 /*
2 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_OOPS_TRAININGDATA_HPP
26 #define SHARE_OOPS_TRAININGDATA_HPP
27
28 #include "cds/cdsConfig.hpp"
29 #include "classfile/compactHashtable.hpp"
30 #include "compiler/compiler_globals.hpp"
31 #include "compiler/compilerDefinitions.hpp"
32 #include "memory/allocation.hpp"
33 #include "memory/metaspaceClosure.hpp"
34 #include "oops/instanceKlass.hpp"
35 #include "oops/method.hpp"
36 #include "oops/objArrayKlass.hpp"
37 #include "runtime/handles.hpp"
38 #include "runtime/mutexLocker.hpp"
39 #include "utilities/count_leading_zeros.hpp"
40 #include "utilities/resizableHashTable.hpp"
41
42 class ciEnv;
43 class ciBaseObject;
44 class CompileTask;
45 class CompileTrainingData;
46 class KlassTrainingData;
47 class MethodTrainingData;
48
49 // Base class for all the training data varieties
50 class TrainingData : public Metadata {
51 friend KlassTrainingData;
52 friend MethodTrainingData;
53 friend CompileTrainingData;
54 public:
  // Key is used to insert any TrainingData (TD) object into a hash table. The key is currently a
  // pointer to the metaspace object the TD is associated with. For example,
  // for KlassTrainingData it's an InstanceKlass, for MethodTrainingData it's a Method.
  // These hash tables exist so that we can find the TD object for a given metaspace object.
60 class Key {
61 mutable Metadata* _meta;
    // These classes can access Key's constructors:
63 friend TrainingData;
64 friend KlassTrainingData;
65 friend MethodTrainingData;
66 friend CompileTrainingData;
67
68 // The empty key
69 Key() : _meta(nullptr) { }
70 bool is_empty() const { return _meta == nullptr; }
71 public:
72 Key(Metadata* meta) : _meta(meta) { }
73
74 static bool can_compute_cds_hash(const Key* const& k);
75 static uint cds_hash(const Key* const& k);
76 static unsigned hash(const Key* const& k) {
77 return primitive_hash(k->meta());
78 }
79 static bool equals(const Key* const& k1, const Key* const& k2) {
80 return k1->meta() == k2->meta();
81 }
82 static inline bool equals(TrainingData* value, const TrainingData::Key* key, int unused) {
83 return equals(value->key(), key);
84 }
85 int cmp(const Key* that) const {
86 auto m1 = this->meta();
87 auto m2 = that->meta();
88 if (m1 < m2) return -1;
89 if (m1 > m2) return +1;
90 return 0;
91 }
92 Metadata* meta() const { return _meta; }
93 void metaspace_pointers_do(MetaspaceClosure *iter);
94 void make_empty() const { _meta = nullptr; }
95 };
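
  // Illustrative sketch (not part of the API contract): a MethodTrainingData is keyed by its
  // Method*, so a later lookup with the same Method* finds the same TD object. Under
  // TrainingDataLocker (see below) the intended pattern looks roughly like this; the exact
  // helpers used by the implementation may differ.
  //
  //   Method* m = ...;                                    // a live metaspace object
  //   TrainingData::Key k(m);                             // the key wraps the Method* pointer
  //   TrainingData* td = training_data_set()->find(&k);   // hash lookup by pointer identity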
96
97 // TrainingDataLocker is used to guard read/write operations on non-MT-safe data structures.
98 // It supports recursive locking and a read-only mode (in which case no locks are taken).
99 // It is also a part of the TD collection termination protocol (see the "snapshot" field).
100 class TrainingDataLocker {
101 #if INCLUDE_CDS
102 static volatile bool _snapshot; // If true we're not allocating new training data
103 #endif
104 static int _lock_mode;
105 const bool _recursive;
106 static void lock() {
107 #if INCLUDE_CDS
108 assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
109 if (_lock_mode > 0) {
110 TrainingData_lock->lock_without_safepoint_check();
111 }
112 #endif
113 }
114 static void unlock() {
115 #if INCLUDE_CDS
116 if (_lock_mode > 0) {
117 TrainingData_lock->unlock();
118 }
119 #endif
120 }
121 static bool safely_locked() {
122 #if INCLUDE_CDS
123 assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
124 if (_lock_mode > 0) {
125 return is_self_locked();
126 } else {
127 return true;
128 }
129 #else
130 return true;
131 #endif
132 }
133 static bool is_self_locked() {
134 return CDS_ONLY(TrainingData_lock->owned_by_self()) NOT_CDS(false);
135 }
136
137 public:
138 static void snapshot() {
139 #if INCLUDE_CDS
140 assert_locked();
141 _snapshot = true;
142 #endif
143 }
144 static bool can_add() {
145 #if INCLUDE_CDS
146 assert_locked();
147 return !_snapshot;
148 #else
149 return false;
150 #endif
151 }
152 static void initialize() {
153 #if INCLUDE_CDS
154 _lock_mode = need_data() ? +1 : -1; // if -1, we go lock-free
155 #endif
156 }
157 static void assert_locked_or_snapshotted() {
158 #if INCLUDE_CDS
159 assert(safely_locked() || _snapshot, "use under TrainingDataLocker or after snapshot");
160 #endif
161 }
162 static void assert_locked() {
163 assert(safely_locked(), "use under TrainingDataLocker");
164 }
165 static void assert_can_add() {
166 assert(can_add(), "Cannot add TrainingData objects");
167 }
168 TrainingDataLocker() : _recursive(is_self_locked()) {
169 if (!_recursive) {
170 lock();
171 }
172 }
173 ~TrainingDataLocker() {
174 if (!_recursive) {
175 unlock();
176 }
177 }
178 };
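
  // Typical use (a sketch): take the lock via RAII before touching the shared TD structures.
  // The constructor is a no-op if the current thread already holds the lock, and no lock is
  // taken at all in the lock-free (-1) mode selected by initialize().
  //
  //   {
  //     TrainingDataLocker l;                  // recursion- and mode-aware
  //     TrainingDataLocker::assert_can_add();  // only if we intend to allocate new TD objects
  //     // ... read or mutate training data ...
  //   }                                        // unlocked here unless recursively held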
179
180 // A set of TD objects that we collect during the training run.
181 class TrainingDataSet {
182 friend TrainingData;
183 ResizeableHashTable<const Key*, TrainingData*,
184 AnyObj::C_HEAP, MemTag::mtCompiler,
185 &TrainingData::Key::hash,
186 &TrainingData::Key::equals>
187 _table;
188
189 public:
190 template<typename... Arg>
191 TrainingDataSet(Arg... arg)
192 : _table(arg...) {
193 }
194 TrainingData* find(const Key* key) const {
195 TrainingDataLocker::assert_locked();
196 if (TrainingDataLocker::can_add()) {
197 auto res = _table.get(key);
198 return res == nullptr ? nullptr : *res;
199 }
200 return nullptr;
201 }
202 bool remove(const Key* key) {
203 return _table.remove(key);
204 }
205 TrainingData* install(TrainingData* td) {
206 TrainingDataLocker::assert_locked();
207 TrainingDataLocker::assert_can_add();
208 auto key = td->key();
209 if (key->is_empty()) {
210 return td; // unkeyed TD not installed
211 }
212 bool created = false;
213 auto prior = _table.put_if_absent(key, td, &created);
214 if (prior == nullptr || *prior == td) {
215 return td;
216 }
217 assert(false, "no pre-existing elements allowed");
218 return *prior;
219 }
220 template<typename Function>
221 void iterate(const Function& fn) const { // lambda enabled API
222 iterate(const_cast<Function&>(fn));
223 }
224 template<typename Function>
225 void iterate(Function& fn) const { // lambda enabled API
226 return _table.iterate_all([&](const TrainingData::Key* k, TrainingData* td) { fn(td); });
227 }
228 int size() const { return _table.number_of_entries(); }
229
230 void verify() const {
231 TrainingDataLocker::assert_locked();
232 iterate([&](TrainingData* td) { td->verify(); });
233 }
234 };
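
  // A plausible usage pattern, shown only as a sketch (the real lookup/creation helpers live
  // in the .cpp file and may differ):
  //
  //   TrainingDataLocker l;
  //   TrainingData::Key key(ik);                           // ik is an InstanceKlass*
  //   TrainingData* td = training_data_set()->find(&key);
  //   if (td == nullptr && TrainingDataLocker::can_add()) {
  //     td = training_data_set()->install(KlassTrainingData::allocate(ik));
  //   }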
235
  // A widget to ensure that we visit each TD object only once (TD objects can have pointers to
  // other TD objects, and those references are sometimes circular).
238 class Visitor {
239 ResizeableHashTable<TrainingData*, bool> _visited;
240 public:
241 Visitor(unsigned size) : _visited(size, 0x3fffffff) { }
242 bool is_visited(TrainingData* td) {
243 return _visited.contains(td);
244 }
245 void visit(TrainingData* td) {
246 bool created;
247 _visited.put_if_absent(td, &created);
248 }
249 };
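
  // Sketch of the intended use: guard a traversal over the (possibly cyclic) TD graph with a
  // Visitor. The helper below is hypothetical.
  //
  //   void walk(TrainingData* td, Visitor& visitor) {
  //     if (visitor.is_visited(td)) {
  //       return;                  // already handled; this is what breaks cycles
  //     }
  //     visitor.visit(td);
  //     // ... recurse into the TD objects this one points to ...
  //   }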
250
251 typedef OffsetCompactHashtable<const TrainingData::Key*, TrainingData*, TrainingData::Key::equals> TrainingDataDictionary;
252 private:
253 Key _key;
254
255 // just forward all constructor arguments to the embedded key
256 template<typename... Arg>
257 TrainingData(Arg... arg)
258 : _key(arg...) { }
259
  // Container for recording TD during the training run
  static TrainingDataSet _training_data_set;
  // Container for replaying the training data (read-only, populated from the AOT image)
263 static TrainingDataDictionary _archived_training_data_dictionary;
264 // Container used for writing the AOT image
265 static TrainingDataDictionary _archived_training_data_dictionary_for_dumping;
266 class DumpTimeTrainingDataInfo {
267 TrainingData* _training_data;
268 public:
269 DumpTimeTrainingDataInfo() : DumpTimeTrainingDataInfo(nullptr) {}
270 DumpTimeTrainingDataInfo(TrainingData* training_data) : _training_data(training_data) {}
271 void metaspace_pointers_do(MetaspaceClosure* it) {
272 it->push(&_training_data);
273 }
274 TrainingData* training_data() {
275 return _training_data;
276 }
277 };
278 typedef GrowableArrayCHeap<DumpTimeTrainingDataInfo, mtClassShared> DumptimeTrainingDataDictionary;
279 // A temporary container that is used to accumulate and filter TD during dumping
280 static DumptimeTrainingDataDictionary* _dumptime_training_data_dictionary;
281
282 static TrainingDataSet* training_data_set() { return &_training_data_set; }
283 static TrainingDataDictionary* archived_training_data_dictionary() { return &_archived_training_data_dictionary; }
284
285 public:
  // Returns the key under which this TD is installed, or else
  // an empty key (see Key::is_empty()) if it is not installed.
288 const Key* key() const { return &_key; }
289
290 static bool have_data() { return AOTReplayTraining; } // Going to read
291 static bool need_data() { return AOTRecordTraining; } // Going to write
292 static bool assembling_data() { return have_data() && CDSConfig::is_dumping_final_static_archive() && CDSConfig::is_dumping_aot_linked_classes(); }
293
294 static bool is_klass_loaded(Klass* k) {
295 if (have_data()) {
296 // If we're running in AOT mode some classes may not be loaded yet
297 if (k->is_objArray_klass()) {
298 k = ObjArrayKlass::cast(k)->bottom_klass();
299 }
300 if (k->is_instance_klass()) {
301 return InstanceKlass::cast(k)->is_loaded();
302 }
303 }
304 return true;
305 }
306
307 template<typename Function>
308 static void iterate(const Function& fn) { iterate(const_cast<Function&>(fn)); }
309
310 template<typename Function>
311 static void iterate(Function& fn) { // lambda enabled API
312 TrainingDataLocker l;
313 if (have_data() && !need_data()) {
314 archived_training_data_dictionary()->iterate_all([&](TrainingData* td) { fn(td); });
315 }
316 if (need_data()) {
317 training_data_set()->iterate(fn);
318 }
319 }
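
  // For example (a sketch): print every known TD object, whether replayed from the AOT image
  // or recorded during the current training run.
  //
  //   TrainingData::iterate([&](TrainingData* td) {
  //     td->print_value_on(tty);
  //   });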
320
321 virtual MethodTrainingData* as_MethodTrainingData() const { return nullptr; }
322 virtual KlassTrainingData* as_KlassTrainingData() const { return nullptr; }
323 virtual CompileTrainingData* as_CompileTrainingData() const { return nullptr; }
324 bool is_MethodTrainingData() const { return as_MethodTrainingData() != nullptr; }
325 bool is_KlassTrainingData() const { return as_KlassTrainingData() != nullptr; }
326 bool is_CompileTrainingData() const { return as_CompileTrainingData() != nullptr; }
327
328 virtual void prepare(Visitor& visitor) = 0;
329 virtual void cleanup(Visitor& visitor) = 0;
330
331 static void initialize() NOT_CDS_RETURN;
332
333 static void verify() NOT_CDS_RETURN;
334
335 // Widget for recording dependencies, as an N-to-M graph relation,
336 // possibly cyclic.
337 template<typename E>
338 class DepList : public StackObj {
339 static const int INITIAL_CAPACITY = 10;
340
341 GrowableArrayCHeap<E, mtCompiler>* _deps_dyn;
342 Array<E>* _deps;
343
344 void copy_on_write_if_necessary() {
345 TrainingDataLocker::assert_locked_or_snapshotted();
346 if (_deps != nullptr && _deps_dyn == nullptr) {
347 _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(length() + INITIAL_CAPACITY);
        for (int i = 0; i < _deps->length(); i++) {
349 _deps_dyn->append(_deps->at(i));
350 }
351 _deps = nullptr;
352 }
353 }
354 public:
355 DepList() {
356 _deps_dyn = nullptr;
357 _deps = nullptr;
358 }
359
360 int length() const {
361 TrainingDataLocker::assert_locked_or_snapshotted();
362 return (_deps_dyn != nullptr ? _deps_dyn->length()
363 : _deps != nullptr ? _deps->length()
364 : 0);
365 }
366 E at(int i) const {
367 TrainingDataLocker::assert_locked_or_snapshotted();
368 assert(i >= 0 && i < length(), "oob");
369 if (_deps_dyn != nullptr) {
370 return _deps_dyn->at(i);
371 } else if (_deps != nullptr) {
372 return _deps->at(i);
373 } else ShouldNotReachHere();
374 }
375 bool append_if_missing(E dep) {
376 TrainingDataLocker::assert_can_add();
377 copy_on_write_if_necessary();
378 if (_deps_dyn == nullptr) {
379 _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(INITIAL_CAPACITY);
380 _deps_dyn->append(dep);
381 return true;
382 } else {
383 return _deps_dyn->append_if_missing(dep);
384 }
385 }
386 bool remove_if_existing(E dep) {
387 TrainingDataLocker::assert_can_add();
388 copy_on_write_if_necessary();
389 if (_deps_dyn != nullptr) {
390 return _deps_dyn->remove_if_existing(dep);
391 }
392 return false;
393 }
394 void clear() {
395 TrainingDataLocker::assert_can_add();
396 if (_deps_dyn != nullptr) {
397 _deps_dyn->clear();
398 }
399 _deps = nullptr;
400 }
401 void append(E dep) {
402 TrainingDataLocker::assert_can_add();
403 copy_on_write_if_necessary();
404 if (_deps_dyn == nullptr) {
405 _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(INITIAL_CAPACITY);
406 }
407 _deps_dyn->append(dep);
408 }
409 bool contains(E dep) {
410 TrainingDataLocker::assert_locked();
411 for (int i = 0; i < length(); i++) {
412 if (dep == at(i)) {
413 return true; // found
414 }
415 }
416 return false; // not found
417 }
418
419 #if INCLUDE_CDS
420 void remove_unshareable_info() {
421 _deps_dyn = nullptr;
422 }
423 #endif
424 void prepare();
425 void metaspace_pointers_do(MetaspaceClosure *iter);
426 };
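
  // DepList instances back the dependency edges declared further below, e.g.
  // KlassTrainingData::_comp_deps and CompileTrainingData::_init_deps. A typical update, as a
  // sketch (the lock must be held and adding must still be allowed):
  //
  //   TrainingDataLocker l;
  //   if (TrainingDataLocker::can_add()) {
  //     _init_deps.append_if_missing(ktd);   // no-op if the edge is already recorded
  //   }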
427
428 virtual void metaspace_pointers_do(MetaspaceClosure *iter);
429
430 static void init_dumptime_table(TRAPS);
431
432 #if INCLUDE_CDS
433 virtual void remove_unshareable_info() {}
434 static void iterate_roots(MetaspaceClosure* it);
435 static void dump_training_data();
436 static void cleanup_training_data();
437 static void serialize(SerializeClosure* soc);
438 static void print_archived_training_data_on(outputStream* st);
439 static TrainingData* lookup_archived_training_data(const Key* k);
440 #endif
441
442 template<typename TrainingDataType, typename... ArgTypes>
443 static TrainingDataType* allocate(ArgTypes... args) {
444 assert(need_data() || have_data(), "");
445 if (TrainingDataLocker::can_add()) {
446 return new (mtClassShared) TrainingDataType(args...);
447 }
448 return nullptr;
449 }
450 };
451
452 // Training data that is associated with an InstanceKlass
453 class KlassTrainingData : public TrainingData {
454 friend TrainingData;
455 friend CompileTrainingData;
456
457 // Used by CDS. These classes need to access the private default constructor.
458 template <class T> friend class CppVtableTesterA;
459 template <class T> friend class CppVtableTesterB;
460 template <class T> friend class CppVtableCloner;
461
462 // cross-link to live klass, or null if not loaded or encountered yet
463 InstanceKlass* _holder;
464
465 DepList<CompileTrainingData*> _comp_deps; // compiles that depend on me
466
467 KlassTrainingData();
468 KlassTrainingData(InstanceKlass* klass);
469
470 int comp_dep_count() const {
471 TrainingDataLocker::assert_locked();
472 return _comp_deps.length();
473 }
474 CompileTrainingData* comp_dep(int i) const {
475 TrainingDataLocker::assert_locked();
476 return _comp_deps.at(i);
477 }
478 void add_comp_dep(CompileTrainingData* ctd) {
479 TrainingDataLocker::assert_locked();
480 _comp_deps.append_if_missing(ctd);
481 }
482 void remove_comp_dep(CompileTrainingData* ctd) {
483 TrainingDataLocker::assert_locked();
484 _comp_deps.remove_if_existing(ctd);
485 }
486 public:
487 Symbol* name() const {
488 precond(has_holder());
489 return holder()->name();
490 }
491 bool has_holder() const { return _holder != nullptr; }
492 InstanceKlass* holder() const { return _holder; }
493
494 static KlassTrainingData* make(InstanceKlass* holder,
495 bool null_if_not_found = false) NOT_CDS_RETURN_(nullptr);
496 static KlassTrainingData* find(InstanceKlass* holder) {
497 return make(holder, true);
498 }
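
  // For example (a sketch of the intended distinction): find() only looks up an existing
  // record, while make() with the default argument presumably also creates one if none
  // exists yet.
  //
  //   KlassTrainingData* ktd = KlassTrainingData::find(ik);   // nullptr if absent
  //   if (ktd == nullptr) {
  //     ktd = KlassTrainingData::make(ik);                    // look up or create
  //   }
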
499 virtual KlassTrainingData* as_KlassTrainingData() const { return const_cast<KlassTrainingData*>(this); };
500
501 void notice_fully_initialized() NOT_CDS_RETURN;
502
503 void print_on(outputStream* st, bool name_only) const;
504 virtual void print_on(outputStream* st) const { print_on(st, false); }
505 virtual void print_value_on(outputStream* st) const { print_on(st, true); }
506
507 virtual void prepare(Visitor& visitor);
508 virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;
509
510 MetaspaceObj::Type type() const {
511 return KlassTrainingDataType;
512 }
513
514 #if INCLUDE_CDS
515 virtual void remove_unshareable_info();
516 #endif
517
518 void metaspace_pointers_do(MetaspaceClosure *iter);
519
520 int size() const {
521 return (int)align_metadata_size(align_up(sizeof(KlassTrainingData), BytesPerWord)/BytesPerWord);
522 }
523
524 const char* internal_name() const {
525 return "{ klass training data }";
526 };
527
528 void verify();
529
530 static KlassTrainingData* allocate(InstanceKlass* holder) {
531 return TrainingData::allocate<KlassTrainingData>(holder);
532 }
533
534 template<typename Function>
535 void iterate_comp_deps(Function fn) const { // lambda enabled API
536 TrainingDataLocker l;
537 for (int i = 0; i < comp_dep_count(); i++) {
538 fn(comp_dep(i));
539 }
540 }
541 };
542
543 // Information about particular JIT tasks.
544 class CompileTrainingData : public TrainingData {
545 friend TrainingData;
546 friend KlassTrainingData;
547
548 // Used by CDS. These classes need to access the private default constructor.
549 template <class T> friend class CppVtableTesterA;
550 template <class T> friend class CppVtableTesterB;
551 template <class T> friend class CppVtableCloner;
552
553 MethodTrainingData* _method;
554 const short _level;
555 const int _compile_id;
556
557 // classes that should be initialized before this JIT task runs
558 DepList<KlassTrainingData*> _init_deps;
  // Number of uninitialized classes left; when it reaches 0, all deps are satisfied
560 volatile int _init_deps_left;
561
562 public:
  // ciRecords is a generic mechanism to memoize CI responses to arbitrary queries. For each function
  // we're interested in, we record (return_value, argument_values) tuples in a list. Arguments are
  // allowed to contain metaspace pointers.
565 class ciRecords {
566 template <typename... Ts> class Arguments {
567 public:
568 bool operator==(const Arguments<>&) const { return true; }
569 void metaspace_pointers_do(MetaspaceClosure *iter) { }
570 };
571 template <typename T, typename... Ts> class Arguments<T, Ts...> {
572 private:
573 T _first;
574 Arguments<Ts...> _remaining;
575
576 public:
577 constexpr Arguments(const T& first, const Ts&... remaining) noexcept
578 : _first(first), _remaining(remaining...) {}
579 constexpr Arguments() noexcept : _first(), _remaining() {}
580 bool operator==(const Arguments<T, Ts...>& that) const {
581 return _first == that._first && _remaining == that._remaining;
582 }
583 template<typename U = T, ENABLE_IF(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value)>
584 void metaspace_pointers_do(MetaspaceClosure *iter) {
585 iter->push(&_first);
586 _remaining.metaspace_pointers_do(iter);
587 }
588 template<typename U = T, ENABLE_IF(!(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value))>
589 void metaspace_pointers_do(MetaspaceClosure *iter) {
590 _remaining.metaspace_pointers_do(iter);
591 }
592 };
593
594 template <typename ReturnType, typename... Args> class ciMemoizedFunction : public StackObj {
595 public:
596 class OptionalReturnType {
597 bool _valid;
598 ReturnType _result;
599 public:
600 OptionalReturnType(bool valid, const ReturnType& result) : _valid(valid), _result(result) {}
601 bool is_valid() const { return _valid; }
602 ReturnType result() const { return _result; }
603 };
604 private:
605 typedef Arguments<Args...> ArgumentsType;
606 class Record : public MetaspaceObj {
607 ReturnType _result;
608 ArgumentsType _arguments;
609 public:
610 Record(const ReturnType& result, const ArgumentsType& arguments) : _result(result), _arguments(arguments) {}
611 Record() { }
612 ReturnType result() const { return _result; }
613 ArgumentsType arguments() const { return _arguments; }
614 bool operator==(const Record& that) { return _arguments == that._arguments; }
615 void metaspace_pointers_do(MetaspaceClosure *iter) { _arguments.metaspace_pointers_do(iter); }
616 };
617 DepList<Record> _data;
618 public:
619 OptionalReturnType find(const Args&... args) {
620 TrainingDataLocker l;
621 ArgumentsType a(args...);
622 for (int i = 0; i < _data.length(); i++) {
623 if (_data.at(i).arguments() == a) {
624 return OptionalReturnType(true, _data.at(i).result());
625 }
626 }
627 return OptionalReturnType(false, ReturnType());
628 }
629 void append_if_missing(const ReturnType& result, const Args&... args) {
630 TrainingDataLocker l;
631 if (l.can_add()) {
632 _data.append_if_missing(Record(result, ArgumentsType(args...)));
633 }
634 }
635 #if INCLUDE_CDS
636 void remove_unshareable_info() { _data.remove_unshareable_info(); }
637 #endif
638 void prepare() {
639 _data.prepare();
640 }
641 void metaspace_pointers_do(MetaspaceClosure *iter) {
642 _data.metaspace_pointers_do(iter);
643 }
644 };
645
646
647 public:
    // Record CI answers for the InlineSmallCode heuristic. This is important since the heuristic is
    // non-commutative and we may want to compile methods in a different order than in the training run.
650 typedef ciMemoizedFunction<int, MethodTrainingData*> ciMethod__inline_instructions_size_type;
651 ciMethod__inline_instructions_size_type ciMethod__inline_instructions_size;
652 #if INCLUDE_CDS
653 void remove_unshareable_info() {
654 ciMethod__inline_instructions_size.remove_unshareable_info();
655 }
656 #endif
657 void prepare() {
658 ciMethod__inline_instructions_size.prepare();
659 }
660 void metaspace_pointers_do(MetaspaceClosure *iter) {
661 ciMethod__inline_instructions_size.metaspace_pointers_do(iter);
662 }
663 };
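
  // Sketch of how the memoized record above is meant to be consulted and refilled (names as
  // declared in this header; the actual call sites presumably live in the CI code):
  //
  //   CompileTrainingData* ctd = ...;
  //   MethodTrainingData* mtd = ...;
  //   auto answer = ctd->ci_records().ciMethod__inline_instructions_size.find(mtd);
  //   if (answer.is_valid()) {
  //     return answer.result();    // replay the size recorded during the training run
  //   }
  //   // ... otherwise compute the size and remember it:
  //   ctd->ci_records().ciMethod__inline_instructions_size.append_if_missing(size, mtd);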
664
665 private:
666 ciRecords _ci_records;
667
668 CompileTrainingData();
669 CompileTrainingData(MethodTrainingData* mtd,
670 int level,
671 int compile_id)
672 : TrainingData(), // empty key
673 _method(mtd), _level(level), _compile_id(compile_id), _init_deps_left(0) { }
674 public:
675 ciRecords& ci_records() { return _ci_records; }
676 static CompileTrainingData* make(CompileTask* task) NOT_CDS_RETURN_(nullptr);
677
678 virtual CompileTrainingData* as_CompileTrainingData() const { return const_cast<CompileTrainingData*>(this); };
679
680 MethodTrainingData* method() const { return _method; }
681
682 int level() const { return _level; }
683
684 int compile_id() const { return _compile_id; }
685
686 int init_dep_count() const {
687 TrainingDataLocker::assert_locked();
688 return _init_deps.length();
689 }
690 KlassTrainingData* init_dep(int i) const {
691 TrainingDataLocker::assert_locked();
692 return _init_deps.at(i);
693 }
694 void add_init_dep(KlassTrainingData* ktd) {
695 TrainingDataLocker::assert_locked();
696 ktd->add_comp_dep(this);
697 _init_deps.append_if_missing(ktd);
698 }
699 void clear_init_deps() {
700 TrainingDataLocker::assert_locked();
701 for (int i = 0; i < _init_deps.length(); i++) {
702 _init_deps.at(i)->remove_comp_dep(this);
703 }
704 _init_deps.clear();
705 }
706 void dec_init_deps_left_release(KlassTrainingData* ktd);
707 int init_deps_left_acquire() const {
708 return AtomicAccess::load_acquire(&_init_deps_left);
709 }
710 uint compute_init_deps_left(bool count_initialized = false);
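
  // Sketch of the protocol suggested by the names above: compute_init_deps_left() counts how
  // many classes in _init_deps are not yet initialized; as each such class becomes fully
  // initialized, dec_init_deps_left_release() decrements the counter, and a reader that
  // observes init_deps_left_acquire() == 0 may treat all class-initialization dependencies of
  // this compile as satisfied.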
711
712 void notice_inlined_method(CompileTask* task, const methodHandle& method) NOT_CDS_RETURN;
713
714 // The JIT looks at classes and objects too and can depend on their state.
715 // These simple calls just report the *possibility* of an observation.
716 void notice_jit_observation(ciEnv* env, ciBaseObject* what) NOT_CDS_RETURN;
717
718 virtual void prepare(Visitor& visitor);
719 virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;
720
721 void print_on(outputStream* st, bool name_only) const;
722 virtual void print_on(outputStream* st) const { print_on(st, false); }
723 virtual void print_value_on(outputStream* st) const { print_on(st, true); }
724
725 #if INCLUDE_CDS
726 virtual void remove_unshareable_info();
727 #endif
728
729 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
730 virtual MetaspaceObj::Type type() const { return CompileTrainingDataType; }
731
732 virtual const char* internal_name() const {
733 return "{ compile training data }";
734 };
735
736 virtual int size() const {
737 return (int)align_metadata_size(align_up(sizeof(CompileTrainingData), BytesPerWord)/BytesPerWord);
738 }
739
740 void verify(bool verify_dep_counter);
741
742 static CompileTrainingData* allocate(MethodTrainingData* mtd, int level, int compile_id) {
743 return TrainingData::allocate<CompileTrainingData>(mtd, level, compile_id);
744 }
745 };
746
747 // Record information about a method at the time compilation is requested.
748 class MethodTrainingData : public TrainingData {
749 friend TrainingData;
750 friend CompileTrainingData;
751
752 // Used by CDS. These classes need to access the private default constructor.
753 template <class T> friend class CppVtableTesterA;
754 template <class T> friend class CppVtableTesterB;
755 template <class T> friend class CppVtableCloner;
756
757 KlassTrainingData* _klass;
758 Method* _holder;
759 CompileTrainingData* _last_toplevel_compiles[CompLevel_count - 1];
760 int _highest_top_level;
  int _level_mask;  // bit set of all compilation levels seen (recorded by notice_compilation)
762 bool _was_toplevel;
763 // metadata snapshots of final state:
764 MethodCounters* _final_counters;
765 MethodData* _final_profile;
766
767 int _invocation_count;
768 int _backedge_count;
769
770 MethodTrainingData();
771 MethodTrainingData(Method* method, KlassTrainingData* ktd) : TrainingData(method) {
772 _klass = ktd;
773 _holder = method;
774 for (int i = 0; i < CompLevel_count - 1; i++) {
775 _last_toplevel_compiles[i] = nullptr;
776 }
777 _highest_top_level = CompLevel_none;
778 _level_mask = 0;
779 _was_toplevel = false;
780 _invocation_count = 0;
781 _backedge_count = 0;
782 }
783
784 static int level_mask(int level) {
785 return ((level & 0xF) != level ? 0 : 1 << level);
786 }
787 static CompLevel highest_level(int mask) {
788 if (mask == 0) return (CompLevel) 0;
789 int diff = (count_leading_zeros(level_mask(0)) - count_leading_zeros(mask));
790 return (CompLevel) diff;
791 }
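
  // Worked example (assuming a 32-bit int): if only tier 1 and tier 4 compilations were seen,
  // the mask is (1 << 1) | (1 << 4) == 0x12. count_leading_zeros(level_mask(0)) ==
  // count_leading_zeros(1) == 31 and count_leading_zeros(0x12) == 27, so highest_level()
  // returns 31 - 27 == 4, the highest level that was recorded.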
792
793 public:
794 KlassTrainingData* klass() const { return _klass; }
795 bool has_holder() const { return _holder != nullptr; }
796 Method* holder() const { return _holder; }
797 bool only_inlined() const { return !_was_toplevel; }
798 bool saw_level(CompLevel l) const { return (_level_mask & level_mask(l)) != 0; }
799 int highest_level() const { return highest_level(_level_mask); }
800 int highest_top_level() const { return _highest_top_level; }
801 MethodData* final_profile() const { return _final_profile; }
802 int invocation_count() const { return _invocation_count; }
803 int backedge_count() const { return _backedge_count; }
804
805 Symbol* name() const {
806 precond(has_holder());
807 return holder()->name();
808 }
809 Symbol* signature() const {
810 precond(has_holder());
811 return holder()->signature();
812 }
813
814 CompileTrainingData* last_toplevel_compile(int level) const {
815 if (level > CompLevel_none) {
816 return _last_toplevel_compiles[level - 1];
817 }
818 return nullptr;
819 }
820
821 void notice_compilation(int level, bool inlined = false) {
822 if (!inlined) {
823 _was_toplevel = true;
824 }
825 _level_mask |= level_mask(level);
826 }
827
828 void notice_toplevel_compilation(int level) {
829 _highest_top_level = MAX2(_highest_top_level, level);
830 }
831
832 static MethodTrainingData* make(const methodHandle& method,
833 bool null_if_not_found = false,
834 bool use_cache = true) NOT_CDS_RETURN_(nullptr);
835 static MethodTrainingData* find_fast(const methodHandle& method) { return make(method, true, true); }
836 static MethodTrainingData* find(const methodHandle& method) { return make(method, true, false); }
837
838 virtual MethodTrainingData* as_MethodTrainingData() const {
839 return const_cast<MethodTrainingData*>(this);
840 };
841
842 void print_on(outputStream* st, bool name_only) const;
843 virtual void print_on(outputStream* st) const { print_on(st, false); }
844 virtual void print_value_on(outputStream* st) const { print_on(st, true); }
845
846 virtual void prepare(Visitor& visitor);
847 virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;
848
849 template<typename Function>
850 void iterate_compiles(Function fn) const { // lambda enabled API
851 for (int i = 0; i < CompLevel_count - 1; i++) {
852 CompileTrainingData* ctd = _last_toplevel_compiles[i];
853 if (ctd != nullptr) {
854 fn(ctd);
855 }
856 }
857 }
858
859 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
860 virtual MetaspaceObj::Type type() const { return MethodTrainingDataType; }
861
862 #if INCLUDE_CDS
863 virtual void remove_unshareable_info();
864 #endif
865
866 virtual int size() const {
867 return (int)align_metadata_size(align_up(sizeof(MethodTrainingData), BytesPerWord)/BytesPerWord);
868 }
869
870 virtual const char* internal_name() const {
871 return "{ method training data }";
872 };
873
874 void verify(bool verify_dep_counter);
875
876 static MethodTrainingData* allocate(Method* m, KlassTrainingData* ktd) {
877 return TrainingData::allocate<MethodTrainingData>(m, ktd);
878 }
879 };
880 #endif // SHARE_OOPS_TRAININGDATA_HPP