1 /*
  2  * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_OOPS_TRAININGDATA_HPP
 26 #define SHARE_OOPS_TRAININGDATA_HPP
 27 
 28 #include "cds/cdsConfig.hpp"
 29 #include "classfile/classLoaderData.hpp"
 30 #include "classfile/compactHashtable.hpp"
 31 #include "compiler/compiler_globals.hpp"
 32 #include "compiler/compilerDefinitions.hpp"
 33 #include "memory/allocation.hpp"
 34 #include "memory/metaspaceClosure.hpp"
 35 #include "oops/instanceKlass.hpp"
 36 #include "oops/method.hpp"
 37 #include "oops/objArrayKlass.hpp"
 38 #include "runtime/handles.hpp"
 39 #include "runtime/mutexLocker.hpp"
 40 #include "utilities/resizableHashTable.hpp"
 41 
 42 class ciEnv;
 43 class ciBaseObject;
 44 class CompileTask;
 45 class CompileTrainingData;
 46 class KlassTrainingData;
 47 class MethodTrainingData;
 48 
 49 // Base class for all the training data varieties
 50 class TrainingData : public Metadata {
 51   friend KlassTrainingData;
 52   friend MethodTrainingData;
 53   friend CompileTrainingData;
 54 public:
  // Key is used to insert any TrainingData (TD) object into a hash table. The key is currently a
  // pointer to the metaspace object the TD is associated with. For example,
  // for KlassTrainingData it's an InstanceKlass, for MethodTrainingData it's a Method.
  // These hash tables exist so that the TD object for a given metaspace object can be found.
 60   class Key {
 61     mutable Metadata* _meta;
    // These classes can access the private constructors:
 63     friend TrainingData;
 64     friend KlassTrainingData;
 65     friend MethodTrainingData;
 66     friend CompileTrainingData;
 67 
 68     // The empty key
 69     Key() : _meta(nullptr) { }
 70     bool is_empty() const { return _meta == nullptr; }
 71   public:
 72     Key(Metadata* meta) : _meta(meta) { }
 73 
 74     static bool can_compute_cds_hash(const Key* const& k);
 75     static uint cds_hash(const Key* const& k);
 76     static unsigned hash(const Key* const& k) {
 77       return primitive_hash(k->meta());
 78     }
 79     static bool equals(const Key* const& k1, const Key* const& k2) {
 80       return k1->meta() == k2->meta();
 81     }
 82     static inline bool equals(TrainingData* value, const TrainingData::Key* key, int unused) {
 83       return equals(value->key(), key);
 84     }
 85     int cmp(const Key* that) const {
 86       auto m1 = this->meta();
 87       auto m2 = that->meta();
 88       if (m1 < m2) return -1;
 89       if (m1 > m2) return +1;
 90       return 0;
 91     }
 92     Metadata* meta() const { return _meta; }
 93     void metaspace_pointers_do(MetaspaceClosure *iter);
 94     void make_empty() const { _meta = nullptr; }
  };
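
  // A minimal sketch (illustration only, not part of the API): keys compare and hash by the
  // identity of the wrapped metaspace pointer, so a lookup key can be built on the stack from
  // the metadata at hand. Here 'm' is assumed to be a Method* obtained elsewhere:
  //
  //   TrainingData::Key lookup_key(m);                    // wraps the Method* pointer
  //   unsigned h = TrainingData::Key::hash(&lookup_key);  // primitive pointer hash
  //   // Key::equals(&k1, &k2) holds iff both keys wrap the same metaspace object.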
 96 
 97   // TrainingDataLocker is used to guard read/write operations on non-MT-safe data structures.
 98   // It supports recursive locking and a read-only mode (in which case no locks are taken).
  // It is also part of the TD collection termination protocol (see the "snapshot" field).
100   class TrainingDataLocker {
    static volatile bool _snapshot; // If true, no new training data is being allocated
102     static int _lock_mode;
103     const bool _recursive;
104     static void lock() {
105 #if INCLUDE_CDS
106       assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
107       if (_lock_mode > 0) {
108         TrainingData_lock->lock_without_safepoint_check();
109       }
110 #endif
111     }
112     static void unlock() {
113 #if INCLUDE_CDS
114       if (_lock_mode > 0) {
115         TrainingData_lock->unlock();
116       }
117 #endif
118     }
119     static bool safely_locked() {
120 #if INCLUDE_CDS
121       assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
122       if (_lock_mode > 0) {
123         return is_self_locked();
124       } else {
125         return true;
126       }
127 #else
128       return true;
129 #endif
130     }
131     static bool is_self_locked() {
132       return CDS_ONLY(TrainingData_lock->owned_by_self()) NOT_CDS(false);
133     }
134 
135   public:
136     static void snapshot() {
137 #if INCLUDE_CDS
138       assert_locked();
139       _snapshot = true;
140 #endif
141     }
142     static bool can_add() {
143 #if INCLUDE_CDS
144       assert_locked();
145       return !_snapshot;
146 #else
147       return false;
148 #endif
149     }
150     static void initialize() {
151 #if INCLUDE_CDS
152       _lock_mode = need_data() ? +1 : -1;   // if -1, we go lock-free
153 #endif
154     }
155     static void assert_locked() {
156       assert(safely_locked(), "use under TrainingDataLocker");
157     }
158     static void assert_can_add() {
159       assert(can_add(), "Cannot add TrainingData objects");
160     }
161     TrainingDataLocker() : _recursive(is_self_locked()) {
162       if (!_recursive) {
163         lock();
164       }
165     }
166     ~TrainingDataLocker() {
167       if (!_recursive) {
168         unlock();
169       }
170     }
171   };
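
  // A minimal sketch of the intended usage (illustration only): the locker is RAII-style and
  // recursive, so helpers that take it can be called from a caller that already holds it. When
  // training data is being recorded (need_data()) this acquires TrainingData_lock; otherwise the
  // locker runs lock-free.
  //
  //   {
  //     TrainingDataLocker l;                  // acquires unless this thread already holds it
  //     TrainingDataLocker::assert_locked();   // sanity check in debug builds
  //     // ... read or update the shared TD structures ...
  //   }                                        // released on scope exit (outermost locker only)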
172 
173   // A set of TD objects that we collect during the training run.
174   class TrainingDataSet {
175     friend TrainingData;
176     ResizeableHashTable<const Key*, TrainingData*,
177                                 AnyObj::C_HEAP, MemTag::mtCompiler,
178                                 &TrainingData::Key::hash,
179                                 &TrainingData::Key::equals>
180       _table;
181 
182   public:
183     template<typename... Arg>
184     TrainingDataSet(Arg... arg)
185       : _table(arg...) {
186     }
187     TrainingData* find(const Key* key) const {
188       TrainingDataLocker::assert_locked();
189       if (TrainingDataLocker::can_add()) {
190         auto res = _table.get(key);
191         return res == nullptr ? nullptr : *res;
192       }
193       return nullptr;
194     }
195     bool remove(const Key* key) {
196       return _table.remove(key);
197     }
198     TrainingData* install(TrainingData* td) {
199       TrainingDataLocker::assert_locked();
200       TrainingDataLocker::assert_can_add();
201       auto key = td->key();
202       if (key->is_empty()) {
203         return td;  // unkeyed TD not installed
204       }
205       bool created = false;
206       auto prior = _table.put_if_absent(key, td, &created);
207       if (prior == nullptr || *prior == td) {
208         return td;
209       }
210       assert(false, "no pre-existing elements allowed");
211       return *prior;
212     }
213     template<typename Function>
214     void iterate(const Function& fn) const { // lambda enabled API
215       iterate(const_cast<Function&>(fn));
216     }
217     template<typename Function>
218     void iterate(Function& fn) const { // lambda enabled API
219       return _table.iterate_all([&](const TrainingData::Key* k, TrainingData* td) { fn(td); });
220     }
221     int size() const { return _table.number_of_entries(); }
222 
223     void verify() const {
224       TrainingDataLocker::assert_locked();
225       iterate([&](TrainingData* td) { td->verify(); });
226     }
227   };
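
  // A minimal sketch of the expected access pattern (illustration only; 'k' and 'make_new_td'
  // are placeholders): look up first, then install under the same TrainingDataLocker, so that
  // install() never finds a pre-existing entry for the key.
  //
  //   TrainingDataLocker l;
  //   TrainingData* td = training_data_set()->find(k);
  //   if (td == nullptr && TrainingDataLocker::can_add()) {
  //     td = training_data_set()->install(make_new_td());   // returns the installed TD
  //   }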
228 
  // A widget to ensure that we visit each TD object only once (TD objects can have pointers to
  // other TD objects, and those references are sometimes circular).
231   class Visitor {
232     ResizeableHashTable<TrainingData*, bool> _visited;
233   public:
234     Visitor(unsigned size) : _visited(size, 0x3fffffff) { }
235     bool is_visited(TrainingData* td) {
236       return _visited.contains(td);
237     }
238     void visit(TrainingData* td) {
239       bool created;
240       _visited.put_if_absent(td, &created);
241     }
242   };
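
  // A minimal sketch (illustration only; 'SomeTrainingData' is hypothetical) of the visit-once
  // guard that prepare()/cleanup() implementations use to cut cycles in the TD graph:
  //
  //   void SomeTrainingData::prepare(Visitor& visitor) {
  //     if (visitor.is_visited(this)) {
  //       return;               // already reached along another path through the graph
  //     }
  //     visitor.visit(this);    // mark before recursing into linked TD objects
  //     // ... prepare the TD objects this one points to ...
  //   }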
243 
244   typedef OffsetCompactHashtable<const TrainingData::Key*, TrainingData*, TrainingData::Key::equals> TrainingDataDictionary;
245 private:
246   Key _key;
247 
248   // just forward all constructor arguments to the embedded key
249   template<typename... Arg>
250   TrainingData(Arg... arg)
251     : _key(arg...) { }
252 
  // Container for recording TD during the training run
  static TrainingDataSet _training_data_set;
  // Container for replaying the training data (read-only, populated from the AOT image)
256   static TrainingDataDictionary _archived_training_data_dictionary;
257   // Container used for writing the AOT image
258   static TrainingDataDictionary _archived_training_data_dictionary_for_dumping;
259   class DumpTimeTrainingDataInfo {
260     TrainingData* _training_data;
261   public:
262     DumpTimeTrainingDataInfo() : DumpTimeTrainingDataInfo(nullptr) {}
263     DumpTimeTrainingDataInfo(TrainingData* training_data) : _training_data(training_data) {}
264     void metaspace_pointers_do(MetaspaceClosure* it) {
265       it->push(&_training_data);
266     }
267     TrainingData* training_data() {
268       return _training_data;
269     }
270   };
271   typedef GrowableArrayCHeap<DumpTimeTrainingDataInfo, mtClassShared> DumptimeTrainingDataDictionary;
272   // A temporary container that is used to accumulate and filter TD during dumping
273   static DumptimeTrainingDataDictionary* _dumptime_training_data_dictionary;
274 
275   static TrainingDataSet* training_data_set() { return &_training_data_set; }
276   static TrainingDataDictionary* archived_training_data_dictionary() { return &_archived_training_data_dictionary; }
277 
278  public:
  // Returns the key under which this TD is installed, or else
  // an empty key (see Key::is_empty()) if it is not installed.
281   const Key* key() const { return &_key; }
282 
283   static bool have_data() { return AOTReplayTraining;  } // Going to read
284   static bool need_data() { return AOTRecordTraining;  } // Going to write
285   static bool assembling_data() { return have_data() && CDSConfig::is_dumping_final_static_archive() && CDSConfig::is_dumping_aot_linked_classes(); }
286 
287   static bool is_klass_loaded(Klass* k) {
288     if (have_data()) {
      // If we're running in AOT mode, some classes may not be loaded yet
290       if (k->is_objArray_klass()) {
291         k = ObjArrayKlass::cast(k)->bottom_klass();
292       }
293       if (k->is_instance_klass()) {
294         return InstanceKlass::cast(k)->is_loaded();
295       }
296     }
297     return true;
298   }
299 
300   template<typename Function>
301   static void iterate(const Function& fn) { iterate(const_cast<Function&>(fn)); }
302 
303   template<typename Function>
304   static void iterate(Function& fn) { // lambda enabled API
305     TrainingDataLocker l;
306     if (have_data()) {
307       archived_training_data_dictionary()->iterate(fn);
308     }
309     if (need_data()) {
310       training_data_set()->iterate(fn);
311     }
312   }
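
  // A minimal sketch (illustration only): iteration is lambda-based and covers both the archived
  // dictionary (when replaying) and the live training set (when recording), e.g.:
  //
  //   TrainingData::iterate([&](TrainingData* td) {
  //     if (td->is_MethodTrainingData()) {
  //       td->as_MethodTrainingData()->print_on(tty);
  //     }
  //   });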
313 
314   virtual MethodTrainingData*   as_MethodTrainingData()  const { return nullptr; }
315   virtual KlassTrainingData*    as_KlassTrainingData()   const { return nullptr; }
316   virtual CompileTrainingData*  as_CompileTrainingData() const { return nullptr; }
317   bool is_MethodTrainingData()  const { return as_MethodTrainingData()  != nullptr; }
318   bool is_KlassTrainingData()   const { return as_KlassTrainingData()   != nullptr; }
319   bool is_CompileTrainingData() const { return as_CompileTrainingData() != nullptr; }
320 
321   virtual void prepare(Visitor& visitor) = 0;
322   virtual void cleanup(Visitor& visitor) = 0;
323 
324   static void initialize() NOT_CDS_RETURN;
325 
326   static void verify();
327 
328   // Widget for recording dependencies, as an N-to-M graph relation,
329   // possibly cyclic.
330   template<typename E>
331   class DepList : public StackObj {
332     GrowableArrayCHeap<E, mtCompiler>* _deps_dyn;
333     Array<E>*                          _deps;
334   public:
335     DepList() {
336       _deps_dyn = nullptr;
337       _deps = nullptr;
338     }
339 
340     int length() const {
341       return (_deps_dyn != nullptr ? _deps_dyn->length()
342               : _deps   != nullptr ? _deps->length()
343               : 0);
344     }
345     E* adr_at(int i) const {
346       return (_deps_dyn != nullptr ? _deps_dyn->adr_at(i)
347               : _deps   != nullptr ? _deps->adr_at(i)
348               : nullptr);
349     }
350     E at(int i) const {
351       assert(i >= 0 && i < length(), "oob");
352       return *adr_at(i);
353     }
354     bool append_if_missing(E dep) {
355       if (_deps_dyn == nullptr) {
356         _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(10);
357         _deps_dyn->append(dep);
358         return true;
359       } else {
360         return _deps_dyn->append_if_missing(dep);
361       }
362     }
363     bool remove_if_existing(E dep) {
364       if (_deps_dyn != nullptr) {
365         return _deps_dyn->remove_if_existing(dep);
366       }
367       return false;
368     }
369     void clear() {
370       if (_deps_dyn != nullptr)  {
371         _deps_dyn->clear();
372       }
373     }
374     void append(E dep) {
375       if (_deps_dyn == nullptr) {
376         _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(10);
377       }
378       _deps_dyn->append(dep);
379     }
380     bool contains(E dep) {
381       for (int i = 0; i < length(); i++) {
382         if (dep == at(i)) {
383           return true; // found
384         }
385       }
386       return false; // not found
387     }
388 
389 #if INCLUDE_CDS
390     void remove_unshareable_info() {
391       _deps_dyn = nullptr;
392     }
393 #endif
394     void prepare(ClassLoaderData* loader_data);
395     void metaspace_pointers_do(MetaspaceClosure *iter);
396   };
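
  // A minimal sketch of DepList usage (illustration only; 'ktd' is a placeholder
  // KlassTrainingData*). During training the list grows dynamically on the C-heap, while the
  // Array<E>* member backs the read-only, archived form:
  //
  //   DepList<KlassTrainingData*> deps;
  //   deps.append_if_missing(ktd);              // true the first time, false on duplicates
  //   assert(deps.contains(ktd), "just added");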
397 
398   virtual void metaspace_pointers_do(MetaspaceClosure *iter);
399 
400   static void init_dumptime_table(TRAPS);
401 
402 #if INCLUDE_CDS
403   virtual void remove_unshareable_info() {}
404   static void iterate_roots(MetaspaceClosure* it);
405   static void dump_training_data();
406   static void cleanup_training_data();
407   static void serialize(SerializeClosure* soc);
408   static void print_archived_training_data_on(outputStream* st);
409   static TrainingData* lookup_archived_training_data(const Key* k);
410 #endif
411 
412   template<typename TrainingDataType, typename... ArgTypes>
413   static TrainingDataType* allocate(ArgTypes... args) {
414     assert(need_data() || have_data(), "");
415     if (TrainingDataLocker::can_add()) {
416       return new (mtClassShared) TrainingDataType(args...);
417     }
418     return nullptr;
419   }
420 };
421 
422 // Training data that is associated with an InstanceKlass
423 class KlassTrainingData : public TrainingData {
424   friend TrainingData;
425   friend CompileTrainingData;
426 
427   // Used by CDS. These classes need to access the private default constructor.
428   template <class T> friend class CppVtableTesterA;
429   template <class T> friend class CppVtableTesterB;
430   template <class T> friend class CppVtableCloner;
431 
432   // cross-link to live klass, or null if not loaded or encountered yet
433   InstanceKlass* _holder;
434 
435   DepList<CompileTrainingData*> _comp_deps; // compiles that depend on me
436 
437   KlassTrainingData();
438   KlassTrainingData(InstanceKlass* klass);
439 
440   int comp_dep_count() const {
441     TrainingDataLocker::assert_locked();
442     return _comp_deps.length();
443   }
444   CompileTrainingData* comp_dep(int i) const {
445     TrainingDataLocker::assert_locked();
446     return _comp_deps.at(i);
447   }
  void add_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
    _comp_deps.append_if_missing(ctd);
  }
  void remove_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
    _comp_deps.remove_if_existing(ctd);
  }
456  public:
457   Symbol* name() const {
458     precond(has_holder());
459     return holder()->name();
460   }
461   bool has_holder()       const { return _holder != nullptr; }
462   InstanceKlass* holder() const { return _holder; }
463 
464   static KlassTrainingData* make(InstanceKlass* holder,
465                                  bool null_if_not_found = false) NOT_CDS_RETURN_(nullptr);
466   static KlassTrainingData* find(InstanceKlass* holder) {
467     return make(holder, true);
468   }
469   virtual KlassTrainingData* as_KlassTrainingData() const { return const_cast<KlassTrainingData*>(this); };
470 
471   ClassLoaderData* class_loader_data() {
472     assert(has_holder(), "");
473     return holder()->class_loader_data();
474   }
475   void notice_fully_initialized() NOT_CDS_RETURN;
476 
477   void print_on(outputStream* st, bool name_only) const;
478   virtual void print_on(outputStream* st) const { print_on(st, false); }
479   virtual void print_value_on(outputStream* st) const { print_on(st, true); }
480 
481   virtual void prepare(Visitor& visitor);
482   virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;
483 
484   MetaspaceObj::Type type() const {
485     return KlassTrainingDataType;
486   }
487 
488 #if INCLUDE_CDS
489   virtual void remove_unshareable_info();
490 #endif
491 
492   void metaspace_pointers_do(MetaspaceClosure *iter);
493 
494   int size() const {
495     return (int)align_metadata_size(align_up(sizeof(KlassTrainingData), BytesPerWord)/BytesPerWord);
496   }
497 
498   const char* internal_name() const {
499     return "{ klass training data }";
500   };
501 
502   void verify();
503 
504   static KlassTrainingData* allocate(InstanceKlass* holder) {
505     return TrainingData::allocate<KlassTrainingData>(holder);
506   }
507 
508   template<typename Function>
509   void iterate_comp_deps(Function fn) const { // lambda enabled API
510     TrainingDataLocker l;
511     for (int i = 0; i < comp_dep_count(); i++) {
512       fn(comp_dep(i));
513     }
514   }
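
  // A minimal sketch (illustration only; 'ktd' and 'n' are placeholders), e.g. counting the
  // compilations that depend on this class being initialized:
  //
  //   int n = 0;
  //   ktd->iterate_comp_deps([&](CompileTrainingData* ctd) { n++; });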
515 };
516 
517 // Information about particular JIT tasks.
518 class CompileTrainingData : public TrainingData {
519   friend TrainingData;
520   friend KlassTrainingData;
521 
522   // Used by CDS. These classes need to access the private default constructor.
523   template <class T> friend class CppVtableTesterA;
524   template <class T> friend class CppVtableTesterB;
525   template <class T> friend class CppVtableCloner;
526 
527   MethodTrainingData* _method;
528   const short _level;
529   const int _compile_id;
530 
531   // classes that should be initialized before this JIT task runs
532   DepList<KlassTrainingData*> _init_deps;
  // Number of uninitialized classes left; when it reaches 0, all deps are satisfied
534   volatile int _init_deps_left;
535 
536 public:
  // ciRecords is a generic mechanism to memoize CI responses to arbitrary queries. For each function
  // we're interested in, we record (return_value, argument_values) tuples in a list. Arguments are
  // allowed to have Metaspace pointers in them.
539   class ciRecords {
540     template <typename... Ts> class Arguments {
541     public:
542       bool operator==(const Arguments<>&) const { return true; }
543       void metaspace_pointers_do(MetaspaceClosure *iter) { }
544     };
545     template <typename T, typename... Ts> class Arguments<T, Ts...> {
546     private:
547       T _first;
548       Arguments<Ts...> _remaining;
549 
550     public:
551       constexpr Arguments(const T& first, const Ts&... remaining) noexcept
552         : _first(first), _remaining(remaining...) {}
553       constexpr Arguments() noexcept : _first(), _remaining() {}
554       bool operator==(const Arguments<T, Ts...>& that) const {
555         return _first == that._first && _remaining == that._remaining;
556       }
557       template<typename U = T, ENABLE_IF(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value)>
558       void metaspace_pointers_do(MetaspaceClosure *iter) {
559         iter->push(&_first);
560         _remaining.metaspace_pointers_do(iter);
561       }
562       template<typename U = T, ENABLE_IF(!(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value))>
563       void metaspace_pointers_do(MetaspaceClosure *iter) {
564         _remaining.metaspace_pointers_do(iter);
565       }
566     };
567 
568     template <typename ReturnType, typename... Args> class ciMemoizedFunction : public StackObj {
569     public:
570       class OptionalReturnType {
571         bool _valid;
572         ReturnType _result;
573       public:
574         OptionalReturnType(bool valid, const ReturnType& result) : _valid(valid), _result(result) {}
575         bool is_valid() const { return _valid; }
576         ReturnType result() const { return _result; }
577       };
578     private:
579       typedef Arguments<Args...> ArgumentsType;
580       class Record : public MetaspaceObj {
581         ReturnType    _result;
582         ArgumentsType _arguments;
583       public:
584         Record(const ReturnType& result, const ArgumentsType& arguments) : _result(result), _arguments(arguments) {}
585         Record() { }
586         ReturnType result() const { return _result; }
587         ArgumentsType arguments() const { return _arguments; }
588         bool operator==(const Record& that) { return _arguments == that._arguments; }
589         void metaspace_pointers_do(MetaspaceClosure *iter) { _arguments.metaspace_pointers_do(iter); }
590       };
591       DepList<Record> _data;
592     public:
593       OptionalReturnType find(const Args&... args) {
594         ArgumentsType a(args...);
595         for (int i = 0; i < _data.length(); i++) {
596           if (_data.at(i).arguments() == a) {
597             return OptionalReturnType(true, _data.at(i).result());
598           }
599         }
600         return OptionalReturnType(false, ReturnType());
601       }
602       bool append_if_missing(const ReturnType& result, const Args&... args) {
603         return _data.append_if_missing(Record(result, ArgumentsType(args...)));
604       }
605 #if INCLUDE_CDS
606       void remove_unshareable_info() { _data.remove_unshareable_info(); }
607 #endif
608       void prepare(ClassLoaderData* loader_data) {
609         _data.prepare(loader_data);
610       }
611       void metaspace_pointers_do(MetaspaceClosure *iter) {
612         _data.metaspace_pointers_do(iter);
613       }
614     };
615 
616 
617 public:
    // Record CI answers for the InlineSmallCode heuristic. This is important because the heuristic is
    // non-commutative, and we may want to compile methods in a different order than in the training run.
620     typedef ciMemoizedFunction<int, MethodTrainingData*> ciMethod__inline_instructions_size_type;
621     ciMethod__inline_instructions_size_type ciMethod__inline_instructions_size;
622 #if INCLUDE_CDS
623     void remove_unshareable_info() {
624       ciMethod__inline_instructions_size.remove_unshareable_info();
625     }
626 #endif
627     void prepare(ClassLoaderData* loader_data) {
628       ciMethod__inline_instructions_size.prepare(loader_data);
629     }
630     void metaspace_pointers_do(MetaspaceClosure *iter) {
631       ciMethod__inline_instructions_size.metaspace_pointers_do(iter);
632     }
633   };
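
  // A minimal sketch of how the memoized query is meant to be consulted (illustration only;
  // 'ctd', 'mtd' and 'computed_size' are placeholders): check for a recorded answer first, and
  // record the freshly computed one if there was none.
  //
  //   auto cached = ctd->ci_records().ciMethod__inline_instructions_size.find(mtd);
  //   int size;
  //   if (cached.is_valid()) {
  //     size = cached.result();                 // replay the answer from the training run
  //   } else {
  //     size = computed_size;                   // whatever the CI computes this time
  //     ctd->ci_records().ciMethod__inline_instructions_size.append_if_missing(size, mtd);
  //   }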
634 
635 private:
636   ciRecords _ci_records;
637 
638   CompileTrainingData();
639   CompileTrainingData(MethodTrainingData* mtd,
640                       int level,
641                       int compile_id)
642       : TrainingData(),  // empty key
643         _method(mtd), _level(level), _compile_id(compile_id), _init_deps_left(0) { }
644 public:
645   ciRecords& ci_records() { return _ci_records; }
646   static CompileTrainingData* make(CompileTask* task) NOT_CDS_RETURN_(nullptr);
647 
648   virtual CompileTrainingData* as_CompileTrainingData() const { return const_cast<CompileTrainingData*>(this); };
649 
650   MethodTrainingData* method() const { return _method; }
651 
652   int level() const { return _level; }
653 
654   int compile_id() const { return _compile_id; }
655 
656   int init_dep_count() const {
657     TrainingDataLocker::assert_locked();
658     return _init_deps.length();
659   }
660   KlassTrainingData* init_dep(int i) const {
661     TrainingDataLocker::assert_locked();
662     return _init_deps.at(i);
663   }
664   void add_init_dep(KlassTrainingData* ktd) {
665     TrainingDataLocker::assert_locked();
666     ktd->add_comp_dep(this);
667     _init_deps.append_if_missing(ktd);
668   }
669   void clear_init_deps() {
670     TrainingDataLocker::assert_locked();
671     for (int i = 0; i < _init_deps.length(); i++) {
672       _init_deps.at(i)->remove_comp_dep(this);
673     }
674     _init_deps.clear();
675   }
676   void dec_init_deps_left_release(KlassTrainingData* ktd);
677   int init_deps_left_acquire() const {
678     return Atomic::load_acquire(&_init_deps_left);
679   }
680   uint compute_init_deps_left(bool count_initialized = false);
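
  // A minimal sketch of the assumed counter protocol (illustration only; 'ctd' is a placeholder):
  // compute_init_deps_left() establishes the count, dec_init_deps_left_release() is called as each
  // dependency class becomes initialized, and a reader checks readiness with:
  //
  //   if (ctd->init_deps_left_acquire() == 0) {
  //     // all classes in _init_deps are initialized; the recorded compilation is unblocked
  //   }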
681 
682   void notice_inlined_method(CompileTask* task, const methodHandle& method) NOT_CDS_RETURN;
683 
684   // The JIT looks at classes and objects too and can depend on their state.
685   // These simple calls just report the *possibility* of an observation.
686   void notice_jit_observation(ciEnv* env, ciBaseObject* what) NOT_CDS_RETURN;
687 
688   virtual void prepare(Visitor& visitor);
689   virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;
690 
691   void print_on(outputStream* st, bool name_only) const;
692   virtual void print_on(outputStream* st) const { print_on(st, false); }
693   virtual void print_value_on(outputStream* st) const { print_on(st, true); }
694 
695 #if INCLUDE_CDS
696   virtual void remove_unshareable_info();
697 #endif
698 
699   virtual void metaspace_pointers_do(MetaspaceClosure* iter);
700   virtual MetaspaceObj::Type type() const { return CompileTrainingDataType; }
701 
702   virtual const char* internal_name() const {
703     return "{ compile training data }";
704   };
705 
706   virtual int size() const {
707     return (int)align_metadata_size(align_up(sizeof(CompileTrainingData), BytesPerWord)/BytesPerWord);
708   }
709 
710   void verify(bool verify_dep_counter);
711 
712   static CompileTrainingData* allocate(MethodTrainingData* mtd, int level, int compile_id) {
713     return TrainingData::allocate<CompileTrainingData>(mtd, level, compile_id);
714   }
715 };
716 
717 // Record information about a method at the time compilation is requested.
718 class MethodTrainingData : public TrainingData {
719   friend TrainingData;
720   friend CompileTrainingData;
721 
722   // Used by CDS. These classes need to access the private default constructor.
723   template <class T> friend class CppVtableTesterA;
724   template <class T> friend class CppVtableTesterB;
725   template <class T> friend class CppVtableCloner;
726 
727   KlassTrainingData* _klass;
728   Method* _holder;
729   CompileTrainingData* _last_toplevel_compiles[CompLevel_count - 1];
730   int _highest_top_level;
731   int _level_mask;  // bit-set of all possible levels
732   bool _was_toplevel;
733   // metadata snapshots of final state:
734   MethodCounters* _final_counters;
735   MethodData*     _final_profile;
736 
737   MethodTrainingData();
738   MethodTrainingData(Method* method, KlassTrainingData* ktd) : TrainingData(method) {
739     _klass = ktd;
740     _holder = method;
741     for (int i = 0; i < CompLevel_count - 1; i++) {
742       _last_toplevel_compiles[i] = nullptr;
743     }
744     _highest_top_level = CompLevel_none;
745     _level_mask = 0;
746     _was_toplevel = false;
747   }
748 
749   static int level_mask(int level) {
750     return ((level & 0xF) != level ? 0 : 1 << level);
751   }
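
  // For example, level_mask(3) == 0b1000 and level_mask(4) == 0b10000, so a method compiled at
  // levels 3 and 4 ends up with _level_mask == 0b11000; out-of-range levels contribute no bits.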
752 
753  public:
754   KlassTrainingData* klass()  const { return _klass; }
755   bool has_holder()           const { return _holder != nullptr; }
756   Method* holder()            const { return _holder; }
757   bool only_inlined()         const { return !_was_toplevel; }
758   bool saw_level(CompLevel l) const { return (_level_mask & level_mask(l)) != 0; }
759   int highest_top_level()     const { return _highest_top_level; }
760   MethodData* final_profile() const { return _final_profile; }
761 
762   Symbol* name() const {
763     precond(has_holder());
764     return holder()->name();
765   }
766   Symbol* signature() const {
767     precond(has_holder());
768     return holder()->signature();
769   }
770 
771   CompileTrainingData* last_toplevel_compile(int level) const {
772     if (level > CompLevel_none) {
773       return _last_toplevel_compiles[level - 1];
774     }
775     return nullptr;
776   }
777 
778   void notice_compilation(int level, bool inlined = false) {
779     if (!inlined) {
780       _was_toplevel = true;
781     }
782     _level_mask |= level_mask(level);
783   }
784 
785   void notice_toplevel_compilation(int level) {
786     _highest_top_level = MAX2(_highest_top_level, level);
787   }
788 
789   static MethodTrainingData* make(const methodHandle& method,
790                                   bool null_if_not_found = false,
791                                   bool use_cache = true) NOT_CDS_RETURN_(nullptr);
792   static MethodTrainingData* find_fast(const methodHandle& method) { return make(method, true, true); }
793   static MethodTrainingData* find(const methodHandle& method) { return make(method, true, false); }
794 
795   virtual MethodTrainingData* as_MethodTrainingData() const {
796     return const_cast<MethodTrainingData*>(this);
797   };
798 
799   void print_on(outputStream* st, bool name_only) const;
800   virtual void print_on(outputStream* st) const { print_on(st, false); }
801   virtual void print_value_on(outputStream* st) const { print_on(st, true); }
802 
803   virtual void prepare(Visitor& visitor);
804   virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;
805 
806   template<typename Function>
807   void iterate_compiles(Function fn) const { // lambda enabled API
808     for (int i = 0; i < CompLevel_count - 1; i++) {
809       CompileTrainingData* ctd = _last_toplevel_compiles[i];
810       if (ctd != nullptr) {
811         fn(ctd);
812       }
813     }
814   }
815 
816   virtual void metaspace_pointers_do(MetaspaceClosure* iter);
817   virtual MetaspaceObj::Type type() const { return MethodTrainingDataType; }
818 
819 #if INCLUDE_CDS
820   virtual void remove_unshareable_info();
821 #endif
822 
823   virtual int size() const {
824     return (int)align_metadata_size(align_up(sizeof(MethodTrainingData), BytesPerWord)/BytesPerWord);
825   }
826 
827   virtual const char* internal_name() const {
828     return "{ method training data }";
829   };
830 
831   void verify(bool verify_dep_counter);
832 
833   static MethodTrainingData* allocate(Method* m, KlassTrainingData* ktd) {
834     return TrainingData::allocate<MethodTrainingData>(m, ktd);
835   }
836 };
837 #endif // SHARE_OOPS_TRAININGDATA_HPP