/*
 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OOPS_TRAININGDATA_HPP
#define SHARE_OOPS_TRAININGDATA_HPP

#include "cds/cdsConfig.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "compiler/compilerDefinitions.hpp"
#include "compiler/compiler_globals.hpp"
#include "memory/allocation.hpp"
#include "memory/metaspaceClosure.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/method.hpp"
#include "oops/objArrayKlass.hpp"
#include "runtime/handles.hpp"
#include "runtime/mutexLocker.hpp"
#include "utilities/count_leading_zeros.hpp"
#include "utilities/resizeableResourceHash.hpp"

class ciEnv;
class ciBaseObject;
class CompileTask;
class CompileTrainingData;
class KlassTrainingData;
class MethodTrainingData;

// Base class for all the training data varieties
class TrainingData : public Metadata {
  friend KlassTrainingData;
  friend MethodTrainingData;
  friend CompileTrainingData;
 public:
  // Key is used to insert any TrainingData (TD) object into a hash table. The key is currently a
  // pointer to a metaspace object the TD is associated with. For example,
  // for KlassTrainingData it's an InstanceKlass, for MethodTrainingData it's a Method.
  // The utility of these hash tables is to be able to find a TD object for a given
  // metaspace object.
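  //
  // For illustration only (not the API's required usage): a lookup keyed by a metaspace
  // object could look roughly like the following, using the internal accessors declared
  // below, where 'm' stands for some Method* available to the caller:
  //
  //   TrainingData::Key key(m);                      // wrap the Method* in a Key
  //   TrainingDataLocker l;                          // the tables are not MT-safe
  //   TrainingData* td = training_data_set()->find(&key);
  //   MethodTrainingData* mtd = (td == nullptr) ? nullptr : td->as_MethodTrainingData();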
  class Key {
    mutable Metadata* _meta;
    // These guys can get to my constructors:
    friend TrainingData;
    friend KlassTrainingData;
    friend MethodTrainingData;
    friend CompileTrainingData;

    // The empty key
    Key() : _meta(nullptr) { }
    bool is_empty() const { return _meta == nullptr; }
   public:
    Key(Metadata* meta) : _meta(meta) { }

    static bool can_compute_cds_hash(const Key* const& k);
    static uint cds_hash(const Key* const& k);
    static unsigned hash(const Key* const& k) {
      return primitive_hash(k->meta());
    }
    static bool equals(const Key* const& k1, const Key* const& k2) {
      return k1->meta() == k2->meta();
    }
    static inline bool equals(TrainingData* value, const TrainingData::Key* key, int unused) {
      return equals(value->key(), key);
    }
    int cmp(const Key* that) const {
      auto m1 = this->meta();
      auto m2 = that->meta();
      if (m1 < m2) return -1;
      if (m1 > m2) return +1;
      return 0;
    }
    Metadata* meta() const { return _meta; }
    void metaspace_pointers_do(MetaspaceClosure* iter);
    void make_empty() const { _meta = nullptr; }
  };

  // TrainingDataLocker is used to guard read/write operations on non-MT-safe data structures.
  // It supports recursive locking and a read-only mode (in which case no locks are taken).
  // It is also a part of the TD collection termination protocol (see the "snapshot" field).
  class TrainingDataLocker {
    static volatile bool _snapshot; // If true we're not allocating new training data
    static int _lock_mode;
    const bool _recursive;
    static void lock() {
#if INCLUDE_CDS
      assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
      if (_lock_mode > 0) {
        TrainingData_lock->lock();
      }
#endif
    }
    static void unlock() {
#if INCLUDE_CDS
      if (_lock_mode > 0) {
        TrainingData_lock->unlock();
      }
#endif
    }
    static bool safely_locked() {
#if INCLUDE_CDS
      assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
      if (_lock_mode > 0) {
        return is_self_locked();
      } else {
        return true;
      }
#else
      return true;
#endif
    }
    static bool is_self_locked() {
      return CDS_ONLY(TrainingData_lock->owned_by_self()) NOT_CDS(false);
    }

   public:
    static void snapshot() {
#if INCLUDE_CDS
      assert_locked();
      _snapshot = true;
#endif
    }
    static bool can_add() {
#if INCLUDE_CDS
      assert_locked();
      return !_snapshot;
#else
      return false;
#endif
    }
    static void initialize() {
#if INCLUDE_CDS
      _lock_mode = need_data() ? +1 : -1; // if -1, we go lock-free
#endif
    }
    static void assert_locked() {
      assert(safely_locked(), "use under TrainingDataLocker");
    }
    static void assert_can_add() {
      assert(can_add(), "Cannot add TrainingData objects");
    }
    TrainingDataLocker() : _recursive(is_self_locked()) {
      if (!_recursive) {
        lock();
      }
    }
    ~TrainingDataLocker() {
      if (!_recursive) {
        unlock();
      }
    }
  };

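  // A minimal usage sketch (illustrative, not prescriptive): take the locker RAII-style
  // around any access to the shared containers; re-entry from the same thread is a no-op,
  // and no lock is taken at all when training data is not being recorded:
  //
  //   {
  //     TrainingDataLocker l;
  //     TrainingDataLocker::assert_locked();
  //     // ... read or update the training data set ...
  //   } // unlocked here, by the outermost locker only
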
  // A set of TD objects that we collect during the training run.
  class TrainingDataSet {
    friend TrainingData;
    ResizeableResourceHashtable<const Key*, TrainingData*,
                                AnyObj::C_HEAP, MemTag::mtCompiler,
                                &TrainingData::Key::hash,
                                &TrainingData::Key::equals>
      _table;

   public:
    template<typename... Arg>
    TrainingDataSet(Arg... arg)
      : _table(arg...) {
    }
    TrainingData* find(const Key* key) const {
      TrainingDataLocker::assert_locked();
      if (TrainingDataLocker::can_add()) {
        auto res = _table.get(key);
        return res == nullptr ? nullptr : *res;
      }
      return nullptr;
    }
    bool remove(const Key* key) {
      return _table.remove(key);
    }
    TrainingData* install(TrainingData* td) {
      TrainingDataLocker::assert_locked();
      TrainingDataLocker::assert_can_add();
      auto key = td->key();
      if (key->is_empty()) {
        return td; // unkeyed TD not installed
      }
      bool created = false;
      auto prior = _table.put_if_absent(key, td, &created);
      if (prior == nullptr || *prior == td) {
        return td;
      }
      assert(false, "no pre-existing elements allowed");
      return *prior;
    }
    template<typename Function>
    void iterate(const Function& fn) const { // lambda enabled API
      iterate(const_cast<Function&>(fn));
    }
    template<typename Function>
    void iterate(Function& fn) const { // lambda enabled API
      return _table.iterate_all([&](const TrainingData::Key* k, TrainingData* td) { fn(td); });
    }
    int size() const { return _table.number_of_entries(); }

    void verify() const {
      TrainingDataLocker::assert_locked();
      iterate([&](TrainingData* td) { td->verify(); });
    }
  };

  // A widget to ensure that we visit a TD object only once (TD objects can have pointers to
  // other TD objects, and the references are sometimes circular).
  class Visitor {
    ResizeableResourceHashtable<TrainingData*, bool> _visited;
   public:
    Visitor(unsigned size) : _visited(size, 0x3fffffff) { }
    bool is_visited(TrainingData* td) {
      return _visited.contains(td);
    }
    void visit(TrainingData* td) {
      bool created;
      _visited.put_if_absent(td, &created);
    }
  };

  typedef OffsetCompactHashtable<const TrainingData::Key*, TrainingData*, TrainingData::Key::equals> TrainingDataDictionary;
 private:
  Key _key;

  // just forward all constructor arguments to the embedded key
  template<typename... Arg>
  TrainingData(Arg... arg)
    : _key(arg...) { }

  // Container for recording TD during the training run
  static TrainingDataSet _training_data_set;
  // Container for replaying the training data (read-only, populated from the AOT image)
  static TrainingDataDictionary _archived_training_data_dictionary;
  // Container used for writing the AOT image
  static TrainingDataDictionary _archived_training_data_dictionary_for_dumping;
  class DumpTimeTrainingDataInfo {
    TrainingData* _training_data;
   public:
    DumpTimeTrainingDataInfo() : DumpTimeTrainingDataInfo(nullptr) {}
    DumpTimeTrainingDataInfo(TrainingData* training_data) : _training_data(training_data) {}
    void metaspace_pointers_do(MetaspaceClosure* it) {
      it->push(&_training_data);
    }
    TrainingData* training_data() {
      return _training_data;
    }
  };
  typedef GrowableArrayCHeap<DumpTimeTrainingDataInfo, mtClassShared> DumptimeTrainingDataDictionary;
  // A temporary container that is used to accumulate and filter TD during dumping
  static DumptimeTrainingDataDictionary* _dumptime_training_data_dictionary;

  static TrainingDataSet* training_data_set() { return &_training_data_set; }
  static TrainingDataDictionary* archived_training_data_dictionary() { return &_archived_training_data_dictionary; }

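  // Rough sketch of the recording path (for orientation only; the real find-or-create logic
  // lives in the subclass factories such as KlassTrainingData::make). 'fresh_td' stands for
  // a newly allocated TD object:
  //
  //   TrainingDataLocker l;
  //   TrainingData* td = training_data_set()->find(&key);
  //   if (td == nullptr && TrainingDataLocker::can_add()) {
  //     td = training_data_set()->install(fresh_td);
  //   }
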
 public:
  // Returns the key under which this TD is installed, or else
  // Key::EMPTY if it is not installed.
  const Key* key() const { return &_key; }

  static bool have_data() { return AOTReplayTraining; } // Going to read
  static bool need_data() { return AOTRecordTraining; } // Going to write
  static bool assembling_data() { return have_data() && CDSConfig::is_dumping_final_static_archive() && CDSConfig::is_dumping_aot_linked_classes(); }

  static bool is_klass_loaded(Klass* k) {
    if (have_data()) {
      // If we're running in AOT mode some classes may not be loaded yet
      if (k->is_objArray_klass()) {
        k = ObjArrayKlass::cast(k)->bottom_klass();
      }
      if (k->is_instance_klass()) {
        return InstanceKlass::cast(k)->is_loaded();
      }
    }
    return true;
  }

  template<typename Function>
  static void iterate(const Function& fn) { iterate(const_cast<Function&>(fn)); }

  template<typename Function>
  static void iterate(Function& fn) { // lambda enabled API
    TrainingDataLocker l;
    if (have_data()) {
      archived_training_data_dictionary()->iterate(fn);
    }
    if (need_data()) {
      training_data_set()->iterate(fn);
    }
  }

  virtual MethodTrainingData* as_MethodTrainingData() const { return nullptr; }
  virtual KlassTrainingData* as_KlassTrainingData() const { return nullptr; }
  virtual CompileTrainingData* as_CompileTrainingData() const { return nullptr; }
  bool is_MethodTrainingData() const { return as_MethodTrainingData() != nullptr; }
  bool is_KlassTrainingData() const { return as_KlassTrainingData() != nullptr; }
  bool is_CompileTrainingData() const { return as_CompileTrainingData() != nullptr; }

  virtual void prepare(Visitor& visitor) = 0;
  virtual void cleanup(Visitor& visitor) = 0;

  static void initialize() NOT_CDS_RETURN;

  static void verify() NOT_CDS_RETURN;

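  // Usage sketch for the lambda-based iteration (illustrative): it visits the archived
  // dictionary when replaying and the in-memory set when recording, e.g. to count the
  // method records:
  //
  //   int n = 0;
  //   TrainingData::iterate([&](TrainingData* td) {
  //     if (td->is_MethodTrainingData()) {
  //       n++;
  //     }
  //   });
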
  // Widget for recording dependencies, as an N-to-M graph relation,
  // possibly cyclic.
  template<typename E>
  class DepList : public StackObj {
    GrowableArrayCHeap<E, mtCompiler>* _deps_dyn;
    Array<E>* _deps;
   public:
    DepList() {
      _deps_dyn = nullptr;
      _deps = nullptr;
    }

    int length() const {
      return (_deps_dyn != nullptr ? _deps_dyn->length()
              : _deps != nullptr ? _deps->length()
              : 0);
    }
    E* adr_at(int i) const {
      return (_deps_dyn != nullptr ? _deps_dyn->adr_at(i)
              : _deps != nullptr ? _deps->adr_at(i)
              : nullptr);
    }
    E at(int i) const {
      assert(i >= 0 && i < length(), "oob");
      return *adr_at(i);
    }
    bool append_if_missing(E dep) {
      if (_deps_dyn == nullptr) {
        _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(10);
        _deps_dyn->append(dep);
        return true;
      } else {
        return _deps_dyn->append_if_missing(dep);
      }
    }
    bool remove_if_existing(E dep) {
      if (_deps_dyn != nullptr) {
        return _deps_dyn->remove_if_existing(dep);
      }
      return false;
    }
    void clear() {
      if (_deps_dyn != nullptr) {
        _deps_dyn->clear();
      }
    }
    void append(E dep) {
      if (_deps_dyn == nullptr) {
        _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(10);
      }
      _deps_dyn->append(dep);
    }
    bool contains(E dep) {
      for (int i = 0; i < length(); i++) {
        if (dep == at(i)) {
          return true; // found
        }
      }
      return false; // not found
    }

#if INCLUDE_CDS
    void remove_unshareable_info() {
      _deps_dyn = nullptr;
    }
#endif
    void prepare(ClassLoaderData* loader_data);
    void metaspace_pointers_do(MetaspaceClosure* iter);
  };

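  // Illustrative DepList behavior: elements live in the dynamic GrowableArray while
  // recording and in the archived Array<E> after dumping; the readers (length/at/contains)
  // work the same either way. Assuming some CompileTrainingData* 'ctd':
  //
  //   DepList<CompileTrainingData*> deps;
  //   deps.append_if_missing(ctd);   // first append allocates the dynamic array
  //   deps.append_if_missing(ctd);   // duplicate is ignored
  //   assert(deps.contains(ctd) && deps.length() == 1, "one unique element");
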
  virtual void metaspace_pointers_do(MetaspaceClosure* iter);

  static void init_dumptime_table(TRAPS);

#if INCLUDE_CDS
  virtual void remove_unshareable_info() {}
  static void iterate_roots(MetaspaceClosure* it);
  static void dump_training_data();
  static void cleanup_training_data();
  static void serialize(SerializeClosure* soc);
  static void print_archived_training_data_on(outputStream* st);
  static TrainingData* lookup_archived_training_data(const Key* k);
#endif

  template<typename TrainingDataType, typename... ArgTypes>
  static TrainingDataType* allocate(ArgTypes... args) {
    assert(need_data() || have_data(), "");
    if (TrainingDataLocker::can_add()) {
      return new (mtClassShared) TrainingDataType(args...);
    }
    return nullptr;
  }
};

// Training data that is associated with an InstanceKlass
class KlassTrainingData : public TrainingData {
  friend TrainingData;
  friend CompileTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  // cross-link to live klass, or null if not loaded or encountered yet
  InstanceKlass* _holder;

  DepList<CompileTrainingData*> _comp_deps; // compiles that depend on me

  KlassTrainingData();
  KlassTrainingData(InstanceKlass* klass);

  int comp_dep_count() const {
    TrainingDataLocker::assert_locked();
    return _comp_deps.length();
  }
  CompileTrainingData* comp_dep(int i) const {
    TrainingDataLocker::assert_locked();
    return _comp_deps.at(i);
  }
  void add_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
    _comp_deps.append_if_missing(ctd);
  }
  void remove_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
    _comp_deps.remove_if_existing(ctd);
  }
 public:
  Symbol* name() const {
    precond(has_holder());
    return holder()->name();
  }
  bool has_holder() const { return _holder != nullptr; }
  InstanceKlass* holder() const { return _holder; }

  static KlassTrainingData* make(InstanceKlass* holder,
                                 bool null_if_not_found = false) NOT_CDS_RETURN_(nullptr);
  static KlassTrainingData* find(InstanceKlass* holder) {
    return make(holder, true);
  }
  virtual KlassTrainingData* as_KlassTrainingData() const { return const_cast<KlassTrainingData*>(this); };

  ClassLoaderData* class_loader_data() {
    assert(has_holder(), "");
    return holder()->class_loader_data();
  }
  void notice_fully_initialized() NOT_CDS_RETURN;

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  MetaspaceObj::Type type() const {
    return KlassTrainingDataType;
  }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  void metaspace_pointers_do(MetaspaceClosure* iter);

  int size() const {
    return (int)align_metadata_size(align_up(sizeof(KlassTrainingData), BytesPerWord)/BytesPerWord);
  }

  const char* internal_name() const {
    return "{ klass training data }";
  };

  void verify();

  static KlassTrainingData* allocate(InstanceKlass* holder) {
    return TrainingData::allocate<KlassTrainingData>(holder);
  }

  template<typename Function>
  void iterate_comp_deps(Function fn) const { // lambda enabled API
    TrainingDataLocker l;
    for (int i = 0; i < comp_dep_count(); i++) {
      fn(comp_dep(i));
    }
  }
};

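// Illustrative only: the klass/compile dependency edges are maintained from both sides
// (CompileTrainingData::add_init_dep() below also calls add_comp_dep()), so a traversal
// over the compilations that wait on a class 'ik' (a hypothetical InstanceKlass*) looks
// roughly like:
//
//   KlassTrainingData* ktd = KlassTrainingData::find(ik);
//   if (ktd != nullptr) {
//     ktd->iterate_comp_deps([&](CompileTrainingData* ctd) {
//       // e.g. notify 'ctd' that 'ik' has become fully initialized
//     });
//   }
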
// Information about particular JIT tasks.
class CompileTrainingData : public TrainingData {
  friend TrainingData;
  friend KlassTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  MethodTrainingData* _method;
  const short _level;
  const int _compile_id;

  // classes that should be initialized before this JIT task runs
  DepList<KlassTrainingData*> _init_deps;
  // Number of uninitialized classes left; when it's 0, all deps are satisfied
  volatile int _init_deps_left;

 public:
  // ciRecords is a generic mechanism to memoize CI responses to arbitrary queries. For each function we're interested in, we record
  // (return_value, argument_values) tuples in a list. Arguments are allowed to have Metaspace pointers in them.
  class ciRecords {
    template <typename... Ts> class Arguments {
     public:
      bool operator==(const Arguments<>&) const { return true; }
      void metaspace_pointers_do(MetaspaceClosure* iter) { }
    };
    template <typename T, typename... Ts> class Arguments<T, Ts...> {
     private:
      T _first;
      Arguments<Ts...> _remaining;

     public:
      constexpr Arguments(const T& first, const Ts&... remaining) noexcept
        : _first(first), _remaining(remaining...) {}
      constexpr Arguments() noexcept : _first(), _remaining() {}
      bool operator==(const Arguments<T, Ts...>& that) const {
        return _first == that._first && _remaining == that._remaining;
      }
      template<typename U = T, ENABLE_IF(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value)>
      void metaspace_pointers_do(MetaspaceClosure* iter) {
        iter->push(&_first);
        _remaining.metaspace_pointers_do(iter);
      }
      template<typename U = T, ENABLE_IF(!(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value))>
      void metaspace_pointers_do(MetaspaceClosure* iter) {
        _remaining.metaspace_pointers_do(iter);
      }
    };

    template <typename ReturnType, typename... Args> class ciMemoizedFunction : public StackObj {
     public:
      class OptionalReturnType {
        bool _valid;
        ReturnType _result;
       public:
        OptionalReturnType(bool valid, const ReturnType& result) : _valid(valid), _result(result) {}
        bool is_valid() const { return _valid; }
        ReturnType result() const { return _result; }
      };
     private:
      typedef Arguments<Args...> ArgumentsType;
      class Record : public MetaspaceObj {
        ReturnType _result;
        ArgumentsType _arguments;
       public:
        Record(const ReturnType& result, const ArgumentsType& arguments) : _result(result), _arguments(arguments) {}
        Record() { }
        ReturnType result() const { return _result; }
        ArgumentsType arguments() const { return _arguments; }
        bool operator==(const Record& that) { return _arguments == that._arguments; }
        void metaspace_pointers_do(MetaspaceClosure* iter) { _arguments.metaspace_pointers_do(iter); }
      };
      DepList<Record> _data;
     public:
      OptionalReturnType find(const Args&... args) {
        ArgumentsType a(args...);
        for (int i = 0; i < _data.length(); i++) {
          if (_data.at(i).arguments() == a) {
            return OptionalReturnType(true, _data.at(i).result());
          }
        }
        return OptionalReturnType(false, ReturnType());
      }
      bool append_if_missing(const ReturnType& result, const Args&... args) {
        return _data.append_if_missing(Record(result, ArgumentsType(args...)));
      }
#if INCLUDE_CDS
      void remove_unshareable_info() { _data.remove_unshareable_info(); }
#endif
      void prepare(ClassLoaderData* loader_data) {
        _data.prepare(loader_data);
      }
      void metaspace_pointers_do(MetaspaceClosure* iter) {
        _data.metaspace_pointers_do(iter);
      }
    };

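    // A sketch of the intended record/replay pattern (illustrative, not the actual call
    // sites; 'inline_size' and 'mtd' stand for values available to the caller):
    //
    //   // training run: remember the CI answer for this argument tuple
    //   ctd->ci_records().ciMethod__inline_instructions_size.append_if_missing(inline_size, mtd);
    //
    //   // replay run: reuse the recorded answer if present
    //   auto memo = ctd->ci_records().ciMethod__inline_instructions_size.find(mtd);
    //   if (memo.is_valid()) {
    //     return memo.result();
    //   }
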
   public:
    // Record CI answers for the InlineSmallCode heuristic. It is important since the heuristic is non-commutative and we may want to
    // compile methods in a different order than in the training run.
    typedef ciMemoizedFunction<int, MethodTrainingData*> ciMethod__inline_instructions_size_type;
    ciMethod__inline_instructions_size_type ciMethod__inline_instructions_size;
#if INCLUDE_CDS
    void remove_unshareable_info() {
      ciMethod__inline_instructions_size.remove_unshareable_info();
    }
#endif
    void prepare(ClassLoaderData* loader_data) {
      ciMethod__inline_instructions_size.prepare(loader_data);
    }
    void metaspace_pointers_do(MetaspaceClosure* iter) {
      ciMethod__inline_instructions_size.metaspace_pointers_do(iter);
    }
  };

 private:
  ciRecords _ci_records;

  CompileTrainingData();
  CompileTrainingData(MethodTrainingData* mtd,
                      int level,
                      int compile_id)
    : TrainingData(), // empty key
      _method(mtd), _level(level), _compile_id(compile_id), _init_deps_left(0) { }
 public:
  ciRecords& ci_records() { return _ci_records; }
  static CompileTrainingData* make(CompileTask* task) NOT_CDS_RETURN_(nullptr);

  virtual CompileTrainingData* as_CompileTrainingData() const { return const_cast<CompileTrainingData*>(this); };

  MethodTrainingData* method() const { return _method; }

  int level() const { return _level; }

  int compile_id() const { return _compile_id; }

  int init_dep_count() const {
    TrainingDataLocker::assert_locked();
    return _init_deps.length();
  }
  KlassTrainingData* init_dep(int i) const {
    TrainingDataLocker::assert_locked();
    return _init_deps.at(i);
  }
  void add_init_dep(KlassTrainingData* ktd) {
    TrainingDataLocker::assert_locked();
    ktd->add_comp_dep(this);
    _init_deps.append_if_missing(ktd);
  }
  void clear_init_deps() {
    TrainingDataLocker::assert_locked();
    for (int i = 0; i < _init_deps.length(); i++) {
      _init_deps.at(i)->remove_comp_dep(this);
    }
    _init_deps.clear();
  }
  void dec_init_deps_left(KlassTrainingData* ktd);
  int init_deps_left() const {
    return Atomic::load(&_init_deps_left);
  }
  uint compute_init_deps_left(bool count_initialized = false);

  void notice_inlined_method(CompileTask* task, const methodHandle& method) NOT_CDS_RETURN;

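  // Sketch of how the init-deps counter is meant to flow (illustrative; 'ctd' and 'ktd'
  // are hypothetical CompileTrainingData* / KlassTrainingData* values):
  //
  //   TrainingDataLocker l;
  //   ctd->add_init_dep(ktd);          // also links back via ktd->add_comp_dep(ctd)
  //   ctd->compute_init_deps_left();   // seed the counter from the recorded deps
  //   ...
  //   ctd->dec_init_deps_left(ktd);    // as each dependency class becomes initialized
  //   if (ctd->init_deps_left() == 0) {
  //     // all dependencies satisfied
  //   }
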
  // The JIT looks at classes and objects too and can depend on their state.
  // These simple calls just report the *possibility* of an observation.
  void notice_jit_observation(ciEnv* env, ciBaseObject* what) NOT_CDS_RETURN;

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return CompileTrainingDataType; }

  virtual const char* internal_name() const {
    return "{ compile training data }";
  };

  virtual int size() const {
    return (int)align_metadata_size(align_up(sizeof(CompileTrainingData), BytesPerWord)/BytesPerWord);
  }

  void verify();

  static CompileTrainingData* allocate(MethodTrainingData* mtd, int level, int compile_id) {
    return TrainingData::allocate<CompileTrainingData>(mtd, level, compile_id);
  }
};

// Record information about a method at the time compilation is requested.
class MethodTrainingData : public TrainingData {
  friend TrainingData;
  friend CompileTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  KlassTrainingData* _klass;
  Method* _holder;
  CompileTrainingData* _last_toplevel_compiles[CompLevel_count - 1];
  int _highest_top_level;
  int _level_mask; // bit-set of all possible levels
  bool _was_toplevel;
  // metadata snapshots of final state:
  MethodCounters* _final_counters;
  MethodData* _final_profile;

  MethodTrainingData();
  MethodTrainingData(Method* method, KlassTrainingData* ktd) : TrainingData(method) {
    _klass = ktd;
    _holder = method;
    for (int i = 0; i < CompLevel_count - 1; i++) {
      _last_toplevel_compiles[i] = nullptr;
    }
    _highest_top_level = CompLevel_none;
    _level_mask = 0;
    _was_toplevel = false;
  }

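  // Worked example of the level-mask encoding (for orientation): level_mask(level) is
  // (1 << level) for levels 0..15, so after
  //
  //   mtd->notice_compilation(1);
  //   mtd->notice_compilation(4);
  //
  // _level_mask == 0b10010, saw_level() is true for levels 1 and 4, and highest_level()
  // recovers 4 from the position of the topmost set bit via count_leading_zeros below.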
  static int level_mask(int level) {
    return ((level & 0xF) != level ? 0 : 1 << level);
  }
  static CompLevel highest_level(int mask) {
    if (mask == 0) return (CompLevel) 0;
    int diff = (count_leading_zeros(level_mask(0)) - count_leading_zeros(mask));
    return (CompLevel) diff;
  }

 public:
  KlassTrainingData* klass() const { return _klass; }
  bool has_holder() const { return _holder != nullptr; }
  Method* holder() const { return _holder; }
  bool only_inlined() const { return !_was_toplevel; }
  bool saw_level(CompLevel l) const { return (_level_mask & level_mask(l)) != 0; }
  int highest_level() const { return highest_level(_level_mask); }
  int highest_top_level() const { return _highest_top_level; }
  MethodData* final_profile() const { return _final_profile; }

  Symbol* name() const {
    precond(has_holder());
    return holder()->name();
  }
  Symbol* signature() const {
    precond(has_holder());
    return holder()->signature();
  }

  CompileTrainingData* last_toplevel_compile(int level) const {
    if (level > CompLevel_none) {
      return _last_toplevel_compiles[level - 1];
    }
    return nullptr;
  }

  void notice_compilation(int level, bool inlined = false) {
    if (!inlined) {
      _was_toplevel = true;
    }
    _level_mask |= level_mask(level);
  }

  void notice_toplevel_compilation(int level) {
    _highest_top_level = MAX2(_highest_top_level, level);
  }

  static MethodTrainingData* make(const methodHandle& method,
                                  bool null_if_not_found = false,
                                  bool use_cache = true) NOT_CDS_RETURN_(nullptr);
  static MethodTrainingData* find_fast(const methodHandle& method) { return make(method, true, true); }
  static MethodTrainingData* find(const methodHandle& method) { return make(method, true, false); }

  virtual MethodTrainingData* as_MethodTrainingData() const {
    return const_cast<MethodTrainingData*>(this);
  };

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  template<typename Function>
  void iterate_compiles(Function fn) const { // lambda enabled API
    for (int i = 0; i < CompLevel_count - 1; i++) {
      CompileTrainingData* ctd = _last_toplevel_compiles[i];
      if (ctd != nullptr) {
        fn(ctd);
      }
    }
  }

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return MethodTrainingDataType; }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  virtual int size() const {
    return (int)align_metadata_size(align_up(sizeof(MethodTrainingData), BytesPerWord)/BytesPerWord);
  }

  virtual const char* internal_name() const {
    return "{ method training data }";
  };

  void verify();

  static MethodTrainingData* allocate(Method* m, KlassTrainingData* ktd) {
    return TrainingData::allocate<MethodTrainingData>(m, ktd);
  }
};
#endif // SHARE_OOPS_TRAININGDATA_HPP