/*
 * Copyright (c) 2023, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OOPS_TRAININGDATA_HPP
#define SHARE_OOPS_TRAININGDATA_HPP

#include "classfile/classLoaderData.hpp"
#include "classfile/compactHashtable.hpp"
#include "compiler/compilerDefinitions.hpp"
#include "compiler/compiler_globals.hpp"
#include "memory/allocation.hpp"
#include "memory/metaspaceClosure.hpp"
#include "oops/instanceKlass.hpp"
#include "runtime/handles.hpp"
#include "runtime/mutexLocker.hpp"
#include "utilities/count_leading_zeros.hpp"
#include "utilities/resizeableResourceHash.hpp"

class ciEnv;
class ciBaseObject;
class CompileTask;
class CompileTrainingData;
class KlassTrainingData;
class MethodTrainingData;

// Base class for all the training data varieties
class TrainingData : public Metadata {
  friend KlassTrainingData;
  friend MethodTrainingData;
  friend CompileTrainingData;
 public:
  // Key is used to insert any TrainingData (TD) object into a hash table. The key is currently a
  // pointer to the metaspace object the TD is associated with. For example,
  // for KlassTrainingData it's an InstanceKlass, for MethodTrainingData it's a Method.
  // These hash tables make it possible to find the TD object for a given
  // metaspace object.
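  // For illustration only (a hedged sketch, not code that appears in this file),
  // a Key wraps the metaspace pointer and is used by the tables below roughly as:
  //
  //   InstanceKlass* ik = ...;                      // some metaspace object
  //   TrainingData::Key key(ik);                    // key wrapping the Metadata* pointer
  //   unsigned h = TrainingData::Key::hash(&key);   // pointer-based hash
  //   // lookups compare keys with Key::equals(), i.e. by identity of the Metadata*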
  class Key {
    mutable Metadata* _meta;
    // These guys can get to my constructors:
    friend TrainingData;
    friend KlassTrainingData;
    friend MethodTrainingData;
    friend CompileTrainingData;

    // The empty key
    Key() : _meta(nullptr) { }
    bool is_empty() const { return _meta == nullptr; }
   public:
    Key(Metadata* meta) : _meta(meta) { }

    static bool can_compute_cds_hash(const Key* const& k);
    static uint cds_hash(const Key* const& k);
    static unsigned hash(const Key* const& k) {
      return primitive_hash(k->meta());
    }
    static bool equals(const Key* const& k1, const Key* const& k2) {
      return k1->meta() == k2->meta();
    }
    static inline bool equals(TrainingData* value, const TrainingData::Key* key, int unused) {
      return equals(value->key(), key);
    }
    int cmp(const Key* that) const {
      auto m1 = this->meta();
      auto m2 = that->meta();
      if (m1 < m2) return -1;
      if (m1 > m2) return +1;
      return 0;
    }
    Metadata* meta() const { return _meta; }
    void metaspace_pointers_do(MetaspaceClosure* iter);
    void make_empty() const { _meta = nullptr; }
  };

  // TrainingDataLocker is used to guard read/write operations on non-MT-safe data structures.
  // It supports recursive locking and a read-only mode (in which case no locks are taken).
  // It is also a part of the TD collection termination protocol (see the "snapshot" field).
  class TrainingDataLocker {
    static volatile bool _snapshot; // If true we're not allocating new training data
    static int _lock_mode;
    const bool _recursive;
    static void lock() {
#if INCLUDE_CDS
      assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
      if (_lock_mode > 0) {
        TrainingData_lock->lock();
      }
#endif
    }
    static void unlock() {
#if INCLUDE_CDS
      if (_lock_mode > 0) {
        TrainingData_lock->unlock();
      }
#endif
    }
    static bool safely_locked() {
#if INCLUDE_CDS
      assert(_lock_mode != 0, "Forgot to call TrainingDataLocker::initialize()");
      if (_lock_mode > 0) {
        return is_self_locked();
      } else {
        return true;
      }
#else
      return true;
#endif
    }
    static bool is_self_locked() {
      return CDS_ONLY(TrainingData_lock->owned_by_self()) NOT_CDS(false);
    }

   public:
    static void snapshot() {
#if INCLUDE_CDS
      assert_locked();
      _snapshot = true;
#endif
    }
    static bool can_add() {
#if INCLUDE_CDS
      assert_locked();
      return !_snapshot;
#else
      return false;
#endif
    }
    static void initialize() {
#if INCLUDE_CDS
      _lock_mode = need_data() ? +1 : -1; // if -1, we go lock-free
#endif
    }
    static void assert_locked() {
      assert(safely_locked(), "use under TrainingDataLocker");
    }
    static void assert_can_add() {
      assert(can_add(), "Cannot add TrainingData objects");
    }
    TrainingDataLocker() : _recursive(is_self_locked()) {
      if (!_recursive) {
        lock();
      }
    }
    ~TrainingDataLocker() {
      if (!_recursive) {
        unlock();
      }
    }
  };
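
  // Typical use (an illustrative sketch only; the actual call sites are in the .cpp files):
  //
  //   {
  //     TrainingDataLocker l;                  // no-op if this thread already holds the lock,
  //                                            // or if we run in the lock-free (read-only) mode
  //     if (TrainingDataLocker::can_add()) {
  //       ... create / install TrainingData ...
  //     }
  //   } // released here unless the lock was held recursively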

  // A set of TD objects that we collect during the training run.
  class TrainingDataSet {
    friend TrainingData;
    ResizeableResourceHashtable<const Key*, TrainingData*,
                                AnyObj::C_HEAP, MemTag::mtCompiler,
                                &TrainingData::Key::hash,
                                &TrainingData::Key::equals>
      _table;

   public:
    template<typename... Arg>
    TrainingDataSet(Arg... arg)
      : _table(arg...) {
    }
    TrainingData* find(const Key* key) const {
      TrainingDataLocker::assert_locked();
      if (TrainingDataLocker::can_add()) {
        auto res = _table.get(key);
        return res == nullptr ? nullptr : *res;
      }
      return nullptr;
    }
    bool remove(const Key* key) {
      return _table.remove(key);
    }
    TrainingData* install(TrainingData* td) {
      TrainingDataLocker::assert_locked();
      TrainingDataLocker::assert_can_add();
      auto key = td->key();
      if (key->is_empty()) {
        return td; // unkeyed TD not installed
      }
      bool created = false;
      auto prior = _table.put_if_absent(key, td, &created);
      if (prior == nullptr || *prior == td) {
        return td;
      }
      assert(false, "no pre-existing elements allowed");
      return *prior;
    }
    template<typename Function>
    void iterate(const Function& fn) const { // lambda enabled API
      iterate(const_cast<Function&>(fn));
    }
    template<typename Function>
    void iterate(Function& fn) const { // lambda enabled API
      return _table.iterate_all([&](const TrainingData::Key* k, TrainingData* td) { fn(td); });
    }
    int size() const { return _table.number_of_entries(); }

    void verify() const {
      TrainingDataLocker::assert_locked();
      iterate([&](TrainingData* td) { td->verify(); });
    }
  };

  // A widget to ensure that we visit each TD object only once (TD objects can have pointers to
  // other TD objects, and those links may form cycles).
  class Visitor {
    ResizeableResourceHashtable<TrainingData*, bool> _visited;
   public:
    Visitor(unsigned size) : _visited(size, 0x3fffffff) { }
    bool is_visited(TrainingData* td) {
      return _visited.contains(td);
    }
    void visit(TrainingData* td) {
      bool created;
      _visited.put_if_absent(td, &created);
    }
  };

  typedef OffsetCompactHashtable<const TrainingData::Key*, TrainingData*, TrainingData::Key::equals> TrainingDataDictionary;
 private:
  Key _key;

  // just forward all constructor arguments to the embedded key
  template<typename... Arg>
  TrainingData(Arg... arg)
    : _key(arg...) { }

  // Container for recording TD during the training run
  static TrainingDataSet _training_data_set;
  // Container for replaying the training data (read-only, populated from the AOT image)
  static TrainingDataDictionary _archived_training_data_dictionary;
  // Container used for writing the AOT image
  static TrainingDataDictionary _archived_training_data_dictionary_for_dumping;
  class DumpTimeTrainingDataInfo {
    TrainingData* _training_data;
   public:
    DumpTimeTrainingDataInfo() : DumpTimeTrainingDataInfo(nullptr) {}
    DumpTimeTrainingDataInfo(TrainingData* training_data) : _training_data(training_data) {}
    void metaspace_pointers_do(MetaspaceClosure* it) {
      it->push(&_training_data);
    }
    TrainingData* training_data() {
      return _training_data;
    }
  };
  typedef GrowableArrayCHeap<DumpTimeTrainingDataInfo, mtClassShared> DumptimeTrainingDataDictionary;
  // A temporary container that is used to accumulate and filter TD during dumping
  static DumptimeTrainingDataDictionary* _dumptime_training_data_dictionary;

  static TrainingDataSet* training_data_set() { return &_training_data_set; }
  static TrainingDataDictionary* archived_training_data_dictionary() { return &_archived_training_data_dictionary; }

 public:
  // Returns the key under which this TD is installed, or else
  // an empty key if it is not installed.
  const Key* key() const { return &_key; }

  static bool have_data() { return AOTReplayTraining; } // Going to read
  static bool need_data() { return AOTRecordTraining; } // Going to write

  template<typename Function>
  static void iterate(const Function& fn) { iterate(const_cast<Function&>(fn)); }

  template<typename Function>
  static void iterate(Function& fn) { // lambda enabled API
    TrainingDataLocker l;
    if (have_data()) {
      archived_training_data_dictionary()->iterate(fn);
    }
    if (need_data()) {
      training_data_set()->iterate(fn);
    }
  }

  virtual MethodTrainingData*  as_MethodTrainingData()  const { return nullptr; }
  virtual KlassTrainingData*   as_KlassTrainingData()   const { return nullptr; }
  virtual CompileTrainingData* as_CompileTrainingData() const { return nullptr; }
  bool is_MethodTrainingData()  const { return as_MethodTrainingData()  != nullptr; }
  bool is_KlassTrainingData()   const { return as_KlassTrainingData()   != nullptr; }
  bool is_CompileTrainingData() const { return as_CompileTrainingData() != nullptr; }

  virtual void prepare(Visitor& visitor) = 0;
  virtual void cleanup(Visitor& visitor) = 0;

  static void initialize() NOT_CDS_RETURN;

  static void verify() NOT_CDS_RETURN;

  // Widget for recording dependencies, as an N-to-M graph relation,
  // possibly cyclic.
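  // For example (an illustrative sketch; do_something_with() is a placeholder, not a
  // function defined anywhere in HotSpot):
  //
  //   DepList<CompileTrainingData*> deps;
  //   deps.append_if_missing(ctd);          // no duplicates are recorded
  //   for (int i = 0; i < deps.length(); i++) {
  //     do_something_with(deps.at(i));
  //   }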
  template<typename E>
  class DepList : public StackObj {
    GrowableArrayCHeap<E, mtCompiler>* _deps_dyn;
    Array<E>*                          _deps;
   public:
    DepList() {
      _deps_dyn = nullptr;
      _deps = nullptr;
    }

    int length() const {
      return (_deps_dyn != nullptr ? _deps_dyn->length()
              : _deps   != nullptr ? _deps->length()
              : 0);
    }
    E* adr_at(int i) const {
      return (_deps_dyn != nullptr ? _deps_dyn->adr_at(i)
              : _deps   != nullptr ? _deps->adr_at(i)
              : nullptr);
    }
    E at(int i) const {
      assert(i >= 0 && i < length(), "oob");
      return *adr_at(i);
    }
    bool append_if_missing(E dep) {
      if (_deps_dyn == nullptr) {
        _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(10);
        _deps_dyn->append(dep);
        return true;
      } else {
        return _deps_dyn->append_if_missing(dep);
      }
    }
    bool remove_if_existing(E dep) {
      if (_deps_dyn != nullptr) {
        return _deps_dyn->remove_if_existing(dep);
      }
      return false;
    }
    void clear() {
      if (_deps_dyn != nullptr) {
        _deps_dyn->clear();
      }
    }
    void append(E dep) {
      if (_deps_dyn == nullptr) {
        _deps_dyn = new GrowableArrayCHeap<E, mtCompiler>(10);
      }
      _deps_dyn->append(dep);
    }
    bool contains(E dep) {
      for (int i = 0; i < length(); i++) {
        if (dep == at(i)) {
          return true; // found
        }
      }
      return false; // not found
    }

#if INCLUDE_CDS
    void remove_unshareable_info() {
      _deps_dyn = nullptr;
    }
#endif
    void prepare(ClassLoaderData* loader_data);
    void metaspace_pointers_do(MetaspaceClosure* iter);
  };

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);

  static void init_dumptime_table(TRAPS);

#if INCLUDE_CDS
  virtual void remove_unshareable_info() {}
  static void iterate_roots(MetaspaceClosure* it);
  static void dump_training_data();
  static void cleanup_training_data();
  static void serialize(SerializeClosure* soc);
  static void print_archived_training_data_on(outputStream* st);
  static void write_training_data_dictionary(TrainingDataDictionary* dictionary);

  static TrainingData* lookup_archived_training_data(const Key* k);
#endif

  template<typename TrainingDataType, typename... ArgTypes>
  static TrainingDataType* allocate(ArgTypes... args) {
    assert(need_data() || have_data(), "");
    if (TrainingDataLocker::can_add()) {
      return new (mtClassShared) TrainingDataType(args...);
    }
    return nullptr;
  }
};
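
// Typical recording-time flow (a hedged sketch; the real call sites live in
// trainingData.cpp and in the compiler, not in this header):
//
//   if (TrainingData::need_data()) {
//     TrainingDataLocker l;
//     if (TrainingDataLocker::can_add()) {
//       KlassTrainingData* ktd = KlassTrainingData::allocate(ik);  // ik: an InstanceKlass*
//       // ... link the new TD object into the recording data structures ...
//     }
//   }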

// Training data that is associated with an InstanceKlass
class KlassTrainingData : public TrainingData {
  friend TrainingData;
  friend CompileTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  // cross-link to live klass, or null if not loaded or encountered yet
  InstanceKlass* _holder;
  jobject _holder_mirror; // extra link to prevent unloading by GC

  DepList<CompileTrainingData*> _comp_deps; // compiles that depend on me

  KlassTrainingData();
  KlassTrainingData(InstanceKlass* klass);

  int comp_dep_count() const {
    TrainingDataLocker::assert_locked();
    return _comp_deps.length();
  }
  CompileTrainingData* comp_dep(int i) const {
    TrainingDataLocker::assert_locked();
    return _comp_deps.at(i);
  }
  void add_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
    _comp_deps.append_if_missing(ctd);
  }
  void remove_comp_dep(CompileTrainingData* ctd) {
    TrainingDataLocker::assert_locked();
    _comp_deps.remove_if_existing(ctd);
  }

 public:
  Symbol* name() const {
    precond(has_holder());
    return holder()->name();
  }
  bool has_holder() const { return _holder != nullptr; }
  InstanceKlass* holder() const { return _holder; }

  static KlassTrainingData* make(InstanceKlass* holder,
                                 bool null_if_not_found = false) NOT_CDS_RETURN_(nullptr);
  static KlassTrainingData* find(InstanceKlass* holder) {
    return make(holder, true);
  }
  virtual KlassTrainingData* as_KlassTrainingData() const { return const_cast<KlassTrainingData*>(this); };

  ClassLoaderData* class_loader_data() {
    assert(has_holder(), "");
    return holder()->class_loader_data();
  }
  void notice_fully_initialized() NOT_CDS_RETURN;

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  MetaspaceObj::Type type() const {
    return KlassTrainingDataType;
  }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  void metaspace_pointers_do(MetaspaceClosure* iter);

  int size() const {
    return (int)align_metadata_size(align_up(sizeof(KlassTrainingData), BytesPerWord)/BytesPerWord);
  }

  const char* internal_name() const {
    return "{ klass training data }";
  };

  void verify();

  static KlassTrainingData* allocate(InstanceKlass* holder) {
    return TrainingData::allocate<KlassTrainingData>(holder);
  }

  template<typename Function>
  void iterate_comp_deps(Function fn) const { // lambda enabled API
    TrainingDataLocker l;
    for (int i = 0; i < comp_dep_count(); i++) {
      fn(comp_dep(i));
    }
  }
};
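
// Example use of the lookup and dependency-iteration API (illustrative only; the
// callback body is a placeholder):
//
//   KlassTrainingData* ktd = KlassTrainingData::find(ik);  // nullptr if not recorded
//   if (ktd != nullptr) {
//     ktd->iterate_comp_deps([&](CompileTrainingData* ctd) {
//       // e.g. inspect the compilations that depend on this class
//     });
//   }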

// Information about particular JIT tasks.
class CompileTrainingData : public TrainingData {
  friend TrainingData;
  friend KlassTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  MethodTrainingData* _method;
  const short _level;
  const int _compile_id;

  // classes that should be initialized before this JIT task runs
  DepList<KlassTrainingData*> _init_deps;
  // Number of uninitialized classes left; when it reaches 0, all deps are satisfied
  volatile int _init_deps_left;

 public:
  // ciRecords is a generic mechanism to memoize CI responses to arbitrary queries. For each
  // function we're interested in, we record (return_value, argument_values) tuples in a list.
  // Arguments are allowed to have metaspace pointers in them.
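  //
  // The intended pattern is look-up-then-record (an illustrative sketch; the actual
  // callers are in the CI layer, and 'records' / 'computed_size' are placeholders):
  //
  //   auto cached = records.ciMethod__inline_instructions_size.find(mtd);
  //   if (!cached.is_valid()) {
  //     records.ciMethod__inline_instructions_size.append_if_missing(computed_size, mtd);
  //   }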
  class ciRecords {
    template <typename... Ts> class Arguments {
     public:
      bool operator==(const Arguments<>&) const { return true; }
      void metaspace_pointers_do(MetaspaceClosure* iter) { }
    };
    template <typename T, typename... Ts> class Arguments<T, Ts...> {
     private:
      T _first;
      Arguments<Ts...> _remaining;

     public:
      constexpr Arguments(const T& first, const Ts&... remaining) noexcept
        : _first(first), _remaining(remaining...) {}
      constexpr Arguments() noexcept : _first(), _remaining() {}
      bool operator==(const Arguments<T, Ts...>& that) const {
        return _first == that._first && _remaining == that._remaining;
      }
      template<typename U = T, ENABLE_IF(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value)>
      void metaspace_pointers_do(MetaspaceClosure* iter) {
        iter->push(&_first);
        _remaining.metaspace_pointers_do(iter);
      }
      template<typename U = T, ENABLE_IF(!(std::is_pointer<U>::value && std::is_base_of<MetaspaceObj, typename std::remove_pointer<U>::type>::value))>
      void metaspace_pointers_do(MetaspaceClosure* iter) {
        _remaining.metaspace_pointers_do(iter);
      }
    };

    template <typename ReturnType, typename... Args> class ciMemoizedFunction : public StackObj {
     public:
      class OptionalReturnType {
        bool _valid;
        ReturnType _result;
       public:
        OptionalReturnType(bool valid, const ReturnType& result) : _valid(valid), _result(result) {}
        bool is_valid() const { return _valid; }
        ReturnType result() const { return _result; }
      };
     private:
      typedef Arguments<Args...> ArgumentsType;
      class Record : public MetaspaceObj {
        ReturnType _result;
        ArgumentsType _arguments;
       public:
        Record(const ReturnType& result, const ArgumentsType& arguments) : _result(result), _arguments(arguments) {}
        Record() { }
        ReturnType result() const { return _result; }
        ArgumentsType arguments() const { return _arguments; }
        bool operator==(const Record& that) { return _arguments == that._arguments; }
        void metaspace_pointers_do(MetaspaceClosure* iter) { _arguments.metaspace_pointers_do(iter); }
      };
      DepList<Record> _data;
     public:
      OptionalReturnType find(const Args&... args) {
        ArgumentsType a(args...);
        for (int i = 0; i < _data.length(); i++) {
          if (_data.at(i).arguments() == a) {
            return OptionalReturnType(true, _data.at(i).result());
          }
        }
        return OptionalReturnType(false, ReturnType());
      }
      bool append_if_missing(const ReturnType& result, const Args&... args) {
        return _data.append_if_missing(Record(result, ArgumentsType(args...)));
      }
#if INCLUDE_CDS
      void remove_unshareable_info() { _data.remove_unshareable_info(); }
#endif
      void prepare(ClassLoaderData* loader_data) {
        _data.prepare(loader_data);
      }
      void metaspace_pointers_do(MetaspaceClosure* iter) {
        _data.metaspace_pointers_do(iter);
      }
    };

   public:
    // Record CI answers for the InlineSmallCode heuristic. This is important since the heuristic
    // is non-commutative and we may want to compile methods in a different order than in the
    // training run.
    typedef ciMemoizedFunction<int, MethodTrainingData*> ciMethod__inline_instructions_size_type;
    ciMethod__inline_instructions_size_type ciMethod__inline_instructions_size;
#if INCLUDE_CDS
    void remove_unshareable_info() {
      ciMethod__inline_instructions_size.remove_unshareable_info();
    }
#endif
    void prepare(ClassLoaderData* loader_data) {
      ciMethod__inline_instructions_size.prepare(loader_data);
    }
    void metaspace_pointers_do(MetaspaceClosure* iter) {
      ciMethod__inline_instructions_size.metaspace_pointers_do(iter);
    }
  };

 private:
  ciRecords _ci_records;

  CompileTrainingData();
  CompileTrainingData(MethodTrainingData* mtd,
                      int level,
                      int compile_id)
      : TrainingData(), // empty key
        _method(mtd), _level(level), _compile_id(compile_id), _init_deps_left(0) { }
 public:
  ciRecords& ci_records() { return _ci_records; }
  static CompileTrainingData* make(CompileTask* task) NOT_CDS_RETURN_(nullptr);

  virtual CompileTrainingData* as_CompileTrainingData() const { return const_cast<CompileTrainingData*>(this); };

  MethodTrainingData* method() const { return _method; }

  int level() const { return _level; }

  int compile_id() const { return _compile_id; }

  int init_dep_count() const {
    TrainingDataLocker::assert_locked();
    return _init_deps.length();
  }
  KlassTrainingData* init_dep(int i) const {
    TrainingDataLocker::assert_locked();
    return _init_deps.at(i);
  }
  void add_init_dep(KlassTrainingData* ktd) {
    TrainingDataLocker::assert_locked();
    ktd->add_comp_dep(this);
    _init_deps.append_if_missing(ktd);
  }
  void clear_init_deps() {
    TrainingDataLocker::assert_locked();
    for (int i = 0; i < _init_deps.length(); i++) {
      _init_deps.at(i)->remove_comp_dep(this);
    }
    _init_deps.clear();
  }
  void dec_init_deps_left(KlassTrainingData* ktd);
  int init_deps_left() const {
    return Atomic::load(&_init_deps_left);
  }
  uint compute_init_deps_left(bool count_initialized = false);

  void notice_inlined_method(CompileTask* task, const methodHandle& method) NOT_CDS_RETURN;

  // The JIT looks at classes and objects too and can depend on their state.
  // These simple calls just report the *possibility* of an observation.
  void notice_jit_observation(ciEnv* env, ciBaseObject* what) NOT_CDS_RETURN;

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return CompileTrainingDataType; }

  virtual const char* internal_name() const {
    return "{ compile training data }";
  };

  virtual int size() const {
    return (int)align_metadata_size(align_up(sizeof(CompileTrainingData), BytesPerWord)/BytesPerWord);
  }

  void verify();

  static CompileTrainingData* allocate(MethodTrainingData* mtd, int level, int compile_id) {
    return TrainingData::allocate<CompileTrainingData>(mtd, level, compile_id);
  }
};

// Record information about a method at the time compilation is requested.
class MethodTrainingData : public TrainingData {
  friend TrainingData;
  friend CompileTrainingData;

  // Used by CDS. These classes need to access the private default constructor.
  template <class T> friend class CppVtableTesterA;
  template <class T> friend class CppVtableTesterB;
  template <class T> friend class CppVtableCloner;

  KlassTrainingData* _klass;
  Method* _holder;
  CompileTrainingData* _last_toplevel_compiles[CompLevel_count - 1];
  int _highest_top_level;
  int _level_mask; // bit-set of all possible levels
  bool _was_inlined;
  bool _was_toplevel;
  // metadata snapshots of final state:
  MethodCounters* _final_counters;
  MethodData*     _final_profile;

  MethodTrainingData();
  MethodTrainingData(Method* method, KlassTrainingData* ktd) : TrainingData(method) {
    _klass = ktd;
    _holder = method;
    for (int i = 0; i < CompLevel_count - 1; i++) {
      _last_toplevel_compiles[i] = nullptr;
    }
    _highest_top_level = CompLevel_none;
    _level_mask = 0;
    _was_inlined = _was_toplevel = false;
  }

  static int level_mask(int level) {
    return ((level & 0xF) != level ? 0 : 1 << level);
  }
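  // For example (illustrative arithmetic): level_mask(2) == 0b100, and
  // highest_level(0b10100) == 4, i.e. the index of the highest bit set in the mask.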
  static CompLevel highest_level(int mask) {
    if (mask == 0) return (CompLevel) 0;
    int diff = (count_leading_zeros(level_mask(0)) - count_leading_zeros(mask));
    return (CompLevel) diff;
  }

 public:
  KlassTrainingData* klass() const { return _klass; }
  bool has_holder() const { return _holder != nullptr; }
  Method* holder() const { return _holder; }
  bool only_inlined() const { return !_was_toplevel; }
  bool never_inlined() const { return !_was_inlined; }
  bool saw_level(CompLevel l) const { return (_level_mask & level_mask(l)) != 0; }
  int highest_level() const { return highest_level(_level_mask); }
  int highest_top_level() const { return _highest_top_level; }
  MethodData* final_profile() const { return _final_profile; }

  Symbol* name() const {
    precond(has_holder());
    return holder()->name();
  }
  Symbol* signature() const {
    precond(has_holder());
    return holder()->signature();
  }

  CompileTrainingData* last_toplevel_compile(int level) const {
    if (level > CompLevel_none) {
      return _last_toplevel_compiles[level - 1];
    }
    return nullptr;
  }

  void notice_compilation(int level, bool inlined = false) {
    if (inlined) {
      _was_inlined = true;
    } else {
      _was_toplevel = true;
    }
    _level_mask |= level_mask(level);
  }

  void notice_toplevel_compilation(int level) {
    _highest_top_level = MAX2(_highest_top_level, level);
  }

  static MethodTrainingData* make(const methodHandle& method,
                                  bool null_if_not_found = false,
                                  bool use_cache = true) NOT_CDS_RETURN_(nullptr);
  static MethodTrainingData* find_fast(const methodHandle& method) { return make(method, true, true); }
  static MethodTrainingData* find(const methodHandle& method) { return make(method, true, false); }

  virtual MethodTrainingData* as_MethodTrainingData() const {
    return const_cast<MethodTrainingData*>(this);
  };

  void print_on(outputStream* st, bool name_only) const;
  virtual void print_on(outputStream* st) const { print_on(st, false); }
  virtual void print_value_on(outputStream* st) const { print_on(st, true); }

  virtual void prepare(Visitor& visitor);
  virtual void cleanup(Visitor& visitor) NOT_CDS_RETURN;

  template<typename Function>
  void iterate_compiles(Function fn) const { // lambda enabled API
    for (int i = 0; i < CompLevel_count - 1; i++) {
      CompileTrainingData* ctd = _last_toplevel_compiles[i];
      if (ctd != nullptr) {
        fn(ctd);
      }
    }
  }

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return MethodTrainingDataType; }

#if INCLUDE_CDS
  virtual void remove_unshareable_info();
#endif

  virtual int size() const {
    return (int)align_metadata_size(align_up(sizeof(MethodTrainingData), BytesPerWord)/BytesPerWord);
  }

  virtual const char* internal_name() const {
    return "{ method training data }";
  };

  void verify();

  static MethodTrainingData* allocate(Method* m, KlassTrainingData* ktd) {
    return TrainingData::allocate<MethodTrainingData>(m, ktd);
  }
};
#endif // SHARE_OOPS_TRAININGDATA_HPP