1 /*
  2  * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_OOPS_TRAININGDATA_HPP
 26 #define SHARE_OOPS_TRAININGDATA_HPP
 27 
 28 #include "cds/cdsConfig.hpp"
 29 #include "classfile/compactHashtable.hpp"
 30 #include "compiler/compiler_globals.hpp"
 31 #include "compiler/compilerDefinitions.hpp"
 32 #include "memory/allocation.hpp"
 33 #include "memory/metaspaceClosure.hpp"
 34 #include "oops/instanceKlass.hpp"
 35 #include "oops/method.hpp"
 36 #include "oops/objArrayKlass.hpp"
 37 #include "runtime/handles.hpp"
 38 #include "runtime/mutexLocker.hpp"
 39 #include "utilities/resizableHashTable.hpp"
 40 
 41 class ciEnv;
 42 class ciBaseObject;
 43 class CompileTask;
 44 class CompileTrainingData;
 45 class KlassTrainingData;
 46 class MethodTrainingData;
 47 
// Base class for all the training data varieties
class TrainingData : public Metadata {
  friend KlassTrainingData;
  friend MethodTrainingData;
  friend CompileTrainingData;
public:
  // Key is used to insert any TrainingData (TD) object into a hash table. The key is currently a
  // pointer to a metaspace object the TD is associated with. For example,
  // for KlassTrainingData it's an InstanceKlass, for MethodTrainingData it's a Method.
  // The utility of these hash tables is to be able to find the TD object for a given
  // metaspace object.
  class Key {
    mutable Metadata* _meta; // mutable so that make_empty() can clear the key of a const TD
    // These guys can get to my constructors:
    friend TrainingData;
    friend KlassTrainingData;
    friend MethodTrainingData;
    friend CompileTrainingData;

    // The empty key
    Key() : _meta(nullptr) { }
    bool is_empty() const { return _meta == nullptr; }
  public:
    Key(Metadata* meta) : _meta(meta) { }

    // CDS support: whether a hash value that is stable across archive dumps can
    // be computed for this key, and the stable hash itself.
    static bool can_compute_cds_hash(const Key* const& k);
    static uint cds_hash(const Key* const& k);
    // Runtime hash: identity hash of the associated metaspace pointer.
    static unsigned hash(const Key* const& k) {
      return primitive_hash(k->meta());
    }
    static bool equals(const Key* const& k1, const Key* const& k2) {
      return k1->meta() == k2->meta();
    }
    // Adapter for OffsetCompactHashtable lookups (the int argument is unused).
    static inline bool equals(TrainingData* value, const TrainingData::Key* key, int unused) {
      return equals(value->key(), key);
    }
    // Total order on keys by raw pointer value; returns -1, 0, or +1.
    int cmp(const Key* that) const {
      auto m1 = this->meta();
      auto m2 = that->meta();
      if (m1 < m2) return -1;
      if (m1 > m2) return +1;
      return 0;
    }
    Metadata* meta() const { return _meta; }
    void metaspace_pointers_do(MetaspaceClosure *iter);
    void make_empty() const { _meta = nullptr; }
  };

  // TrainingDataLocker is used to guard read/write operations on non-MT-safe data structures.
  // It supports recursive locking and a read-only mode (in which case no locks are taken).
  // It is also a part of the TD collection termination protocol (see the "snapshot" field).
  class TrainingDataLocker {
#if INCLUDE_CDS
    static volatile bool _snapshot; // If true we're not allocating new training data
#endif
    // 0: initialize() not yet called; >0: guard with TrainingData_lock; <0: lock-free mode
    static int _lock_mode;
    const bool _recursive; // true if the constructing thread already held the lock
    static void lock() {
#if INCLUDE_CDS
      assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
      if (_lock_mode > 0) {
        TrainingData_lock->lock_without_safepoint_check();
      }
#endif
    }
    static void unlock() {
#if INCLUDE_CDS
      if (_lock_mode > 0) {
        TrainingData_lock->unlock();
      }
#endif
    }
    // True when it is safe to access the guarded data: either the current
    // thread holds TrainingData_lock, or we are in lock-free mode.
    static bool safely_locked() {
#if INCLUDE_CDS
      assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
      if (_lock_mode > 0) {
        return is_self_locked();
      } else {
        return true;
      }
#else
      return true;
#endif
    }
    static bool is_self_locked() {
      return CDS_ONLY(TrainingData_lock->owned_by_self()) NOT_CDS(false);
    }

  public:
    // Terminates TD collection: after this, can_add() is false and no new
    // training data objects are allocated.
    static void snapshot() {
#if INCLUDE_CDS
      assert_locked();
      _snapshot = true;
#endif
    }
    static bool can_add() {
#if INCLUDE_CDS
      assert_locked();
      return !_snapshot;
#else
      return false;
#endif
    }
    static void initialize() {
#if INCLUDE_CDS
      _lock_mode = need_data() ? +1 : -1;   // if -1, we go lock-free
#endif
    }
    static void assert_locked_or_snapshotted() {
#if INCLUDE_CDS
      assert(safely_locked() || _snapshot, "use under TrainingDataLocker or after snapshot");
#endif
    }
    static void assert_locked() {
      assert(safely_locked(), "use under TrainingDataLocker");
    }
    static void assert_can_add() {
      assert(can_add(), "Cannot add TrainingData objects");
    }
    // Recursive locking: only the outermost locker actually takes the lock.
    TrainingDataLocker() : _recursive(is_self_locked()) {
      if (!_recursive) {
        lock();
      }
    }
    ~TrainingDataLocker() {
      if (!_recursive) {
        unlock();
      }
    }
  };

  // A set of TD objects that we collect during the training run.
  class TrainingDataSet {
    friend TrainingData;
    ResizeableHashTable<const Key*, TrainingData*,
                                AnyObj::C_HEAP, MemTag::mtCompiler,
                                &TrainingData::Key::hash,
                                &TrainingData::Key::equals>
      _table;

  public:
    template<typename... Arg>
    TrainingDataSet(Arg... arg)
      : _table(arg...) {
    }
    // Looks up the TD for the given key. Returns nullptr if absent, or
    // unconditionally once the set has been snapshotted (can_add() is false).
    TrainingData* find(const Key* key) const {
      TrainingDataLocker::assert_locked();
      if (TrainingDataLocker::can_add()) {
        auto res = _table.get(key);
        return res == nullptr ? nullptr : *res;
      }
      return nullptr;
    }
    bool remove(const Key* key) {
      return _table.remove(key);
    }
    // Inserts td under its key. An unkeyed TD is not installed; a pre-existing
    // entry under the same key (other than td itself) is a bug.
    TrainingData* install(TrainingData* td) {
      TrainingDataLocker::assert_locked();
      TrainingDataLocker::assert_can_add();
      auto key = td->key();
      if (key->is_empty()) {
        return td;  // unkeyed TD not installed
      }
      bool created = false;
      auto prior = _table.put_if_absent(key, td, &created);
      if (prior == nullptr || *prior == td) {
        return td;
      }
      assert(false, "no pre-existing elements allowed");
      return *prior;
    }
    template<typename Function>
    void iterate(const Function& fn) const { // lambda enabled API
      iterate(const_cast<Function&>(fn));
    }
    template<typename Function>
    void iterate(Function& fn) const { // lambda enabled API
      return _table.iterate_all([&](const TrainingData::Key* k, TrainingData* td) { fn(td); });
    }
    int size() const { return _table.number_of_entries(); }

    void verify() const {
      TrainingDataLocker::assert_locked();
      iterate([&](TrainingData* td) { td->verify(); });
    }
  };

  // A widget to ensure that we visit a TD object only once (TD objects can have pointers to
  // other TD objects that are sometimes circular).
  class Visitor {
    ResizeableHashTable<TrainingData*, bool> _visited;
  public:
    // NOTE(review): the second constructor argument is presumably the maximum
    // table size (effectively unbounded here) -- confirm against ResizeableHashTable.
    Visitor(unsigned size) : _visited(size, 0x3fffffff) { }
    bool is_visited(TrainingData* td) {
      return _visited.contains(td);
    }
    void visit(TrainingData* td) {
      bool created;
      _visited.put_if_absent(td, &created);
    }
  };

  typedef OffsetCompactHashtable<const TrainingData::Key*, TrainingData*, TrainingData::Key::equals> TrainingDataDictionary;
private:
  Key _key;

  // just forward all constructor arguments to the embedded key
  template<typename... Arg>
  TrainingData(Arg... arg)
    : _key(arg...) { }

  // Container for recording TD during training run
  static TrainingDataSet _training_data_set;
  // Container for replaying the training data (read-only, populated from the AOT image)
  static TrainingDataDictionary _archived_training_data_dictionary;
  // Container used for writing the AOT image
  static TrainingDataDictionary _archived_training_data_dictionary_for_dumping;
  // Wrapper that lets a GrowableArray element participate in metaspace pointer iteration.
  class DumpTimeTrainingDataInfo {
    TrainingData* _training_data;
  public:
    DumpTimeTrainingDataInfo() : DumpTimeTrainingDataInfo(nullptr) {}
    DumpTimeTrainingDataInfo(TrainingData* training_data) : _training_data(training_data) {}
    void metaspace_pointers_do(MetaspaceClosure* it) {
      it->push(&_training_data);
    }
    TrainingData* training_data() {
      return _training_data;
    }
  };
  typedef GrowableArrayCHeap<DumpTimeTrainingDataInfo, mtClassShared> DumptimeTrainingDataDictionary;
  // A temporary container that is used to accumulate and filter TD during dumping
  static DumptimeTrainingDataDictionary* _dumptime_training_data_dictionary;

  static TrainingDataSet* training_data_set() { return &_training_data_set; }
  static TrainingDataDictionary* archived_training_data_dictionary() { return &_archived_training_data_dictionary; }

 public:
  // Returns the key under which this TD is installed, or else
  // an empty key (Key::is_empty()) if it is not installed.
  const Key* key() const { return &_key; }

  static bool have_data() { return AOTReplayTraining;  } // Going to read
  static bool need_data() { return AOTRecordTraining;  } // Going to write
  // True when previously recorded TD is being folded into the final AOT image.
  static bool assembling_data() { return have_data() && CDSConfig::is_dumping_final_static_archive() && CDSConfig::is_dumping_aot_linked_classes(); }

  static bool is_klass_loaded(Klass* k) {
    if (have_data()) {
      // If we're running in AOT mode some classes may not be loaded yet
      if (k->is_objArray_klass()) {
        k = ObjArrayKlass::cast(k)->bottom_klass();
      }
      if (k->is_instance_klass()) {
        return InstanceKlass::cast(k)->is_loaded();
      }
    }
    return true;
  }

  template<typename Function>
  static void iterate(const Function& fn) { iterate(const_cast<Function&>(fn)); }

  // Iterates over all TD objects: the archived dictionary when replaying,
  // and/or the live training-data set when recording.
  template<typename Function>
  static void iterate(Function& fn) { // lambda enabled API
    TrainingDataLocker l;
    if (have_data()) {
      archived_training_data_dictionary()->iterate(fn);
    }
    if (need_data()) {
      training_data_set()->iterate(fn);
    }
  }

  // Dynamic type tests; each subclass overrides exactly one of the as_* accessors.
  virtual MethodTrainingData*   as_MethodTrainingData()  const { return nullptr; }
  virtual KlassTrainingData*    as_KlassTrainingData()   const { return nullptr; }
  virtual CompileTrainingData*  as_CompileTrainingData() const { return nullptr; }
  bool is_MethodTrainingData()  const { return as_MethodTrainingData()  != nullptr; }
  bool is_KlassTrainingData()   const { return as_KlassTrainingData()   != nullptr; }
  bool is_CompileTrainingData() const { return as_CompileTrainingData() != nullptr; }

  virtual void prepare(Visitor& visitor) = 0;
  virtual void cleanup(Visitor& visitor) = 0;

  static void initialize() NOT_CDS_RETURN;

  static void verify();

  // Widget for recording dependencies, as an N-to-M graph relation,
  // possibly cyclic.
  template<typename E>
  class DepList : public StackObj {
    GrowableArrayCHeap<E, mtCompiler>* _deps_dyn; // writable list used while recording
    Array<E>*                          _deps;     // read-only list from the AOT image
  public:
    DepList() {
      _deps_dyn = nullptr;
      _deps = nullptr;
    }

    int length() const {
      TrainingDataLocker::assert_locked_or_snapshotted();
      return (_deps_dyn != nullptr ? _deps_dyn->length()
              : _deps   != nullptr ? _deps->length()
              : 0);
    }
    E* adr_at(int i) const {
      TrainingDataLocker::assert_locked_or_snapshotted();
      return (_deps_dyn != nullptr ? _deps_dyn->adr_at(i)
              : _deps   != nullptr ? _deps->adr_at(i)
              : nullptr);
    }
    E at(int i) const {
      TrainingDataLocker::assert_locked_or_snapshotted();
      assert(i >= 0 && i < length(), "oob");
      return *adr_at(i);
    }
    // Appends dep if not already present; returns true if it was added.
    bool append_if_missing(E dep) {
      TrainingDataLocker::assert_can_add();
      if (_deps_dyn == nullptr) {
        _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(10);
        _deps_dyn->append(dep);
        return true;
      } else {
        return _deps_dyn->append_if_missing(dep);
      }
    }
    // Removes dep if present; returns true if it was removed.
    bool remove_if_existing(E dep) {
      TrainingDataLocker::assert_can_add();
      if (_deps_dyn != nullptr) {
        return _deps_dyn->remove_if_existing(dep);
      }
      return false;
    }
    void clear() {
      TrainingDataLocker::assert_can_add();
      if (_deps_dyn != nullptr)  {
        _deps_dyn->clear();
      }
    }
    // Unconditional append (may create duplicates, unlike append_if_missing).
    void append(E dep) {
      TrainingDataLocker::assert_can_add();
      if (_deps_dyn == nullptr) {
        _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(10);
      }
      _deps_dyn->append(dep);
    }
    // Linear scan over the current list; O(length()).
    bool contains(E dep) {
      TrainingDataLocker::assert_locked();
      for (int i = 0; i < length(); i++) {
        if (dep == at(i)) {
          return true; // found
        }
      }
      return false; // not found
    }

#if INCLUDE_CDS
    void remove_unshareable_info() {
      _deps_dyn = nullptr;
    }
#endif
    void prepare();
    void metaspace_pointers_do(MetaspaceClosure *iter);
  };

  virtual void metaspace_pointers_do(MetaspaceClosure *iter);

  static void init_dumptime_table(TRAPS);

#if INCLUDE_CDS
  virtual void remove_unshareable_info() {}
  static void iterate_roots(MetaspaceClosure* it);
  static void dump_training_data();
  static void cleanup_training_data();
  static void serialize(SerializeClosure* soc);
  static void print_archived_training_data_on(outputStream* st);
  static TrainingData* lookup_archived_training_data(const Key* k);
#endif

  // Allocates a new TD object of the requested type, or returns nullptr once
  // the collection has been snapshotted (no further additions allowed).
  template<typename TrainingDataType, typename... ArgTypes>
  static TrainingDataType* allocate(ArgTypes... args) {
    assert(need_data() || have_data(), "");
    if (TrainingDataLocker::can_add()) {
      return new (mtClassShared) TrainingDataType(args...);
    }
    return nullptr;
  }
};
435 
// Training data that is associated with an InstanceKlass
class KlassTrainingData : public TrainingData {
  friend TrainingData;
  friend CompileTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  // cross-link to live klass, or null if not loaded or encountered yet
  InstanceKlass* _holder;

  DepList<CompileTrainingData*> _comp_deps; // compiles that depend on me

  KlassTrainingData();
  KlassTrainingData(InstanceKlass* klass);

  int comp_dep_count() const {
    TrainingDataLocker::assert_locked();
    return _comp_deps.length();
  }
  CompileTrainingData* comp_dep(int i) const {
    TrainingDataLocker::assert_locked();
    return _comp_deps.at(i);
  }
  void add_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
     _comp_deps.append_if_missing(ctd);
  }
  void remove_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
     _comp_deps.remove_if_existing(ctd);
  }
 public:
  // Name of the associated klass; requires has_holder().
  Symbol* name() const {
    precond(has_holder());
    return holder()->name();
  }
  bool has_holder()       const { return _holder != nullptr; }
  InstanceKlass* holder() const { return _holder; }

  // Finds or creates the TD for the given klass; with null_if_not_found set,
  // no new TD is created when none exists yet.
  static KlassTrainingData* make(InstanceKlass* holder,
                                 bool null_if_not_found = false) NOT_CDS_RETURN_(nullptr);
  static KlassTrainingData* find(InstanceKlass* holder) {
    return make(holder, true);
  }
  virtual KlassTrainingData* as_KlassTrainingData() const { return const_cast<KlassTrainingData*>(this); };

  void notice_fully_initialized() NOT_CDS_RETURN;

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  MetaspaceObj::Type type() const {
    return KlassTrainingDataType;
  }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  void metaspace_pointers_do(MetaspaceClosure *iter);

  // Object size in metadata words (Metadata interface).
  int size() const {
    return (int)align_metadata_size(align_up(sizeof(KlassTrainingData), BytesPerWord)/BytesPerWord);
  }

  const char* internal_name() const {
    return "{ klass training data }";
  };

  void verify();

  static KlassTrainingData* allocate(InstanceKlass* holder) {
    return TrainingData::allocate<KlassTrainingData>(holder);
  }

  // Applies fn to every compile that depends on this klass, under the TD lock.
  template<typename Function>
  void iterate_comp_deps(Function fn) const { // lambda enabled API
    TrainingDataLocker l;
    for (int i = 0; i < comp_dep_count(); i++) {
      fn(comp_dep(i));
    }
  }
};
526 
// Information about particular JIT tasks.
class CompileTrainingData : public TrainingData {
  friend TrainingData;
  friend KlassTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  MethodTrainingData* _method;  // the method this compile belongs to
  const short _level;           // compilation level of the recorded task
  const int _compile_id;        // id of the recorded compile task

  // classes that should be initialized before this JIT task runs
  DepList<KlassTrainingData*> _init_deps;
  // Number of uninitialized classes left, when it's 0, all deps are satisfied
  volatile int _init_deps_left;

public:
  // ciRecords is a generic mechanism to memoize CI responses to arbitrary queries. For each function we're interested in we record
  // (return_value, argument_values) tuples in a list. Arguments are allowed to have Metaspace pointers in them.
  class ciRecords {
    // Recursive tuple of argument values; this primary template is the empty base case.
    template <typename... Ts> class Arguments {
    public:
      bool operator==(const Arguments<>&) const { return true; }
      void metaspace_pointers_do(MetaspaceClosure *iter) { }
    };
    // Non-empty case: holds the first argument and recurses on the rest.
    template <typename T, typename... Ts> class Arguments<T, Ts...> {
    private:
      T _first;
      Arguments<Ts...> _remaining;

    public:
      constexpr Arguments(const T& first, const Ts&... remaining) noexcept
        : _first(first), _remaining(remaining...) {}
      constexpr Arguments() noexcept : _first(), _remaining() {}
      bool operator==(const Arguments<T, Ts...>& that) const {
        return _first == that._first && _remaining == that._remaining;
      }
      // Overload selected when T is a pointer to a MetaspaceObj: push it to the closure.
      template<typename U = T, ENABLE_IF(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value)>
      void metaspace_pointers_do(MetaspaceClosure *iter) {
        iter->push(&_first);
        _remaining.metaspace_pointers_do(iter);
      }
      // Overload for all other argument types: skip _first, recurse on the rest.
      template<typename U = T, ENABLE_IF(!(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value))>
      void metaspace_pointers_do(MetaspaceClosure *iter) {
        _remaining.metaspace_pointers_do(iter);
      }
    };

    // Memoized function: a list of (result, arguments) records with linear lookup.
    template <typename ReturnType, typename... Args> class ciMemoizedFunction : public StackObj {
    public:
      // Result of find(): carries both a validity flag and the memoized value.
      class OptionalReturnType {
        bool _valid;
        ReturnType _result;
      public:
        OptionalReturnType(bool valid, const ReturnType& result) : _valid(valid), _result(result) {}
        bool is_valid() const { return _valid; }
        ReturnType result() const { return _result; }
      };
    private:
      typedef Arguments<Args...> ArgumentsType;
      class Record : public MetaspaceObj {
        ReturnType    _result;
        ArgumentsType _arguments;
      public:
        Record(const ReturnType& result, const ArgumentsType& arguments) : _result(result), _arguments(arguments) {}
        Record() { }
        ReturnType result() const { return _result; }
        ArgumentsType arguments() const { return _arguments; }
        // Records compare by arguments only -- the result is not part of identity.
        bool operator==(const Record& that) { return _arguments == that._arguments; }
        void metaspace_pointers_do(MetaspaceClosure *iter) { _arguments.metaspace_pointers_do(iter); }
      };
      DepList<Record> _data;
    public:
      // Linear search for a memoized result matching the given arguments.
      OptionalReturnType find(const Args&... args) {
        TrainingDataLocker l;
        ArgumentsType a(args...);
        for (int i = 0; i < _data.length(); i++) {
          if (_data.at(i).arguments() == a) {
            return OptionalReturnType(true, _data.at(i).result());
          }
        }
        return OptionalReturnType(false, ReturnType());
      }
      // Memoizes (result, args) unless an equal-argument record already exists
      // or the TD collection has been snapshotted.
      void append_if_missing(const ReturnType& result, const Args&... args) {
        TrainingDataLocker l;
        if (l.can_add()) {
          _data.append_if_missing(Record(result, ArgumentsType(args...)));
        }
      }
#if INCLUDE_CDS
      void remove_unshareable_info() { _data.remove_unshareable_info(); }
#endif
      void prepare() {
        _data.prepare();
      }
      void metaspace_pointers_do(MetaspaceClosure *iter) {
        _data.metaspace_pointers_do(iter);
      }
    };


public:
    // Record CI answers for the InlineSmallCode heuristic. It is important since the heuristic is non-commutative and we may want to
    // compile methods in a different order than in the training run.
    typedef ciMemoizedFunction<int, MethodTrainingData*> ciMethod__inline_instructions_size_type;
    ciMethod__inline_instructions_size_type ciMethod__inline_instructions_size;
#if INCLUDE_CDS
    void remove_unshareable_info() {
      ciMethod__inline_instructions_size.remove_unshareable_info();
    }
#endif
    void prepare() {
      ciMethod__inline_instructions_size.prepare();
    }
    void metaspace_pointers_do(MetaspaceClosure *iter) {
      ciMethod__inline_instructions_size.metaspace_pointers_do(iter);
    }
  };

private:
  ciRecords _ci_records;

  CompileTrainingData();
  CompileTrainingData(MethodTrainingData* mtd,
                      int level,
                      int compile_id)
      : TrainingData(),  // empty key
        _method(mtd), _level(level), _compile_id(compile_id), _init_deps_left(0) { }
public:
  ciRecords& ci_records() { return _ci_records; }
  static CompileTrainingData* make(CompileTask* task) NOT_CDS_RETURN_(nullptr);

  virtual CompileTrainingData* as_CompileTrainingData() const { return const_cast<CompileTrainingData*>(this); };

  MethodTrainingData* method() const { return _method; }

  int level() const { return _level; }

  int compile_id() const { return _compile_id; }

  int init_dep_count() const {
    TrainingDataLocker::assert_locked();
    return _init_deps.length();
  }
  KlassTrainingData* init_dep(int i) const {
    TrainingDataLocker::assert_locked();
    return _init_deps.at(i);
  }
  // Records a class-initialization dependency; also back-links this compile
  // into the klass's comp-dep list so the relation stays symmetric.
  void add_init_dep(KlassTrainingData* ktd) {
    TrainingDataLocker::assert_locked();
    ktd->add_comp_dep(this);
    _init_deps.append_if_missing(ktd);
  }
  // Drops all init deps, removing the matching back-links first.
  void clear_init_deps() {
    TrainingDataLocker::assert_locked();
    for (int i = 0; i < _init_deps.length(); i++) {
      _init_deps.at(i)->remove_comp_dep(this);
    }
    _init_deps.clear();
  }
  void dec_init_deps_left_release(KlassTrainingData* ktd);
  int init_deps_left_acquire() const {
    return AtomicAccess::load_acquire(&_init_deps_left);
  }
  uint compute_init_deps_left(bool count_initialized = false);

  void notice_inlined_method(CompileTask* task, const methodHandle& method) NOT_CDS_RETURN;

  // The JIT looks at classes and objects too and can depend on their state.
  // These simple calls just report the *possibility* of an observation.
  void notice_jit_observation(ciEnv* env, ciBaseObject* what) NOT_CDS_RETURN;

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return CompileTrainingDataType; }

  virtual const char* internal_name() const {
    return "{ compile training data }";
  };

  // Object size in metadata words (Metadata interface).
  virtual int size() const {
    return (int)align_metadata_size(align_up(sizeof(CompileTrainingData), BytesPerWord)/BytesPerWord);
  }

  void verify(bool verify_dep_counter);

  static CompileTrainingData* allocate(MethodTrainingData* mtd, int level, int compile_id) {
    return TrainingData::allocate<CompileTrainingData>(mtd, level, compile_id);
  }
};
730 
// Record information about a method at the time compilation is requested.
class MethodTrainingData : public TrainingData {
  friend TrainingData;
  friend CompileTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  KlassTrainingData* _klass;  // TD of the method's declaring klass
  Method* _holder;            // cross-link to the live method
  // Last top-level compile seen per level; index is (level - 1).
  CompileTrainingData* _last_toplevel_compiles[CompLevel_count - 1];
  int _highest_top_level;     // highest level at which a top-level compilation was noticed
  int _level_mask;  // bit-set of all possible levels
  bool _was_toplevel;         // false while the method has only ever been inlined
  // metadata snapshots of final state:
  MethodCounters* _final_counters;
  MethodData*     _final_profile;

  MethodTrainingData();
  MethodTrainingData(Method* method, KlassTrainingData* ktd) : TrainingData(method) {
    _klass = ktd;
    _holder = method;
    for (int i = 0; i < CompLevel_count - 1; i++) {
      _last_toplevel_compiles[i] = nullptr;
    }
    _highest_top_level = CompLevel_none;
    _level_mask = 0;
    _was_toplevel = false;
  }

  // Maps a compilation level to its bit in _level_mask; 0 for out-of-range levels.
  static int level_mask(int level) {
    return ((level & 0xF) != level ? 0 : 1 << level);
  }

 public:
  KlassTrainingData* klass()  const { return _klass; }
  bool has_holder()           const { return _holder != nullptr; }
  Method* holder()            const { return _holder; }
  bool only_inlined()         const { return !_was_toplevel; }
  bool saw_level(CompLevel l) const { return (_level_mask & level_mask(l)) != 0; }
  int highest_top_level()     const { return _highest_top_level; }
  MethodData* final_profile() const { return _final_profile; }

  // Name of the associated method; requires has_holder().
  Symbol* name() const {
    precond(has_holder());
    return holder()->name();
  }
  Symbol* signature() const {
    precond(has_holder());
    return holder()->signature();
  }

  // NOTE(review): only the lower bound is checked here; callers are presumably
  // expected to pass level < CompLevel_count -- confirm at call sites.
  CompileTrainingData* last_toplevel_compile(int level) const {
    if (level > CompLevel_none) {
      return _last_toplevel_compiles[level - 1];
    }
    return nullptr;
  }

  // Records that a compilation at the given level happened; inlined
  // compilations do not mark the method as top-level.
  void notice_compilation(int level, bool inlined = false) {
    if (!inlined) {
      _was_toplevel = true;
    }
    _level_mask |= level_mask(level);
  }

  void notice_toplevel_compilation(int level) {
    _highest_top_level = MAX2(_highest_top_level, level);
  }

  // Finds or creates the TD for the given method; with null_if_not_found set,
  // no new TD is created when none exists yet.
  static MethodTrainingData* make(const methodHandle& method,
                                  bool null_if_not_found = false,
                                  bool use_cache = true) NOT_CDS_RETURN_(nullptr);
  static MethodTrainingData* find_fast(const methodHandle& method) { return make(method, true, true); }
  static MethodTrainingData* find(const methodHandle& method) { return make(method, true, false); }

  virtual MethodTrainingData* as_MethodTrainingData() const {
    return const_cast<MethodTrainingData*>(this);
  };

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  // Applies fn to each recorded last top-level compile, skipping empty slots.
  template<typename Function>
  void iterate_compiles(Function fn) const { // lambda enabled API
    for (int i = 0; i < CompLevel_count - 1; i++) {
      CompileTrainingData* ctd = _last_toplevel_compiles[i];
      if (ctd != nullptr) {
        fn(ctd);
      }
    }
  }

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return MethodTrainingDataType; }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  // Object size in metadata words (Metadata interface).
  virtual int size() const {
    return (int)align_metadata_size(align_up(sizeof(MethodTrainingData), BytesPerWord)/BytesPerWord);
  }

  virtual const char* internal_name() const {
    return "{ method training data }";
  };

  void verify(bool verify_dep_counter);

  static MethodTrainingData* allocate(Method* m, KlassTrainingData* ktd) {
    return TrainingData::allocate<MethodTrainingData>(m, ktd);
  }
};
851 #endif // SHARE_OOPS_TRAININGDATA_HPP