
src/hotspot/share/oops/trainingData.cpp


 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "ci/ciEnv.hpp"
 26 #include "ci/ciMetadata.hpp"
 27 #include "cds/cdsConfig.hpp"
 28 #include "cds/metaspaceShared.hpp"
 29 #include "classfile/classLoaderData.hpp"
 30 #include "classfile/compactHashtable.hpp"
 31 #include "classfile/javaClasses.hpp"
 32 #include "classfile/symbolTable.hpp"
 33 #include "classfile/systemDictionaryShared.hpp"
 34 #include "compiler/compileTask.hpp"
 35 #include "memory/metadataFactory.hpp"
 36 #include "memory/metaspaceClosure.hpp"
 37 #include "memory/resourceArea.hpp"
 38 #include "oops/method.hpp"
 39 #include "oops/methodCounters.hpp"

 40 #include "oops/trainingData.hpp"
 41 #include "runtime/arguments.hpp"
 42 #include "runtime/javaThread.inline.hpp"
 43 #include "runtime/jniHandles.inline.hpp"
 44 #include "utilities/growableArray.hpp"
 45 
 46 TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
 47 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
 48 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
 49 TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
 50 int TrainingData::TrainingDataLocker::_lock_mode;
 51 volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
 52 
 53 MethodTrainingData::MethodTrainingData() {
 54   // Used by cppVtables.cpp only
 55   assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
 56 }
 57 
 58 KlassTrainingData::KlassTrainingData() {
 59   // Used by cppVtables.cpp only
 60   assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
 61 }
 62 
 63 CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
 64   // Used by cppVtables.cpp only
 65   assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
 66 }
 67 
 68 void TrainingData::initialize() {
 69   // this is a nop if training modes are not enabled
 70   if (have_data() || need_data()) {
 71     // The data structures we currently have do not support iterative training, so you cannot replay
 72     // and train at the same time. Going forward, we may want to adjust iteration/search to enable that.
 73     guarantee(have_data() != need_data(), "Iterative training is not supported");
 74     TrainingDataLocker::initialize();
 75   }

 76 }
 77 
 78 static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
 79   guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
 80   TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
 81   guarantee(td == td1, "");
 82 }
 83 
 84 void TrainingData::verify() {
 85   if (TrainingData::have_data()) {
 86     archived_training_data_dictionary()->iterate([&](TrainingData* td) {
 87       if (td->is_KlassTrainingData()) {
 88         KlassTrainingData* ktd = td->as_KlassTrainingData();
 89         if (ktd->has_holder() && ktd->holder()->is_loaded()) {
 90           Key k(ktd->holder());
 91           verify_archived_entry(td, &k);
 92         }
 93         ktd->verify();
 94       } else if (td->is_MethodTrainingData()) {
 95         MethodTrainingData* mtd = td->as_MethodTrainingData();

203   if (m->method_holder() == nullptr) {
204     return nullptr; // do not record (dynamically generated method)
205   }
206   MethodTrainingData* mtd = MethodTrainingData::make(m);
207   if (mtd == nullptr) {
208     return nullptr; // allocation failure
209   }
210   mtd->notice_compilation(level);
211 
212   TrainingDataLocker l;
213   CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
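       // Keep only the most recent top-level compilation record per tier: a newer
       // compile_id replaces the previous record and clears its init deps.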
214   if (ctd != nullptr) {
215     CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
216     if (last_ctd != nullptr) {
217       assert(mtd->highest_top_level() >= level, "consistency");
218       if (last_ctd->compile_id() < compile_id) {
219         last_ctd->clear_init_deps();
220         last_ctd = ctd;
221       }
222     } else {
223        last_ctd = ctd;
224        mtd->notice_toplevel_compilation(level);
225     }
226   }
227   return ctd;
228 }
229 
230 
231 void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
232   LogStreamHandle(Trace, training) log;
233   if (log.is_enabled()) {
234     log.print("CTD "); print_on(&log); log.cr();
235     log.print("KTD "); ktd->print_on(&log); log.cr();
236   }
 237   assert(ktd != nullptr && ktd->has_holder(), "");
238   assert(_init_deps.contains(ktd), "");
239   assert(_init_deps_left > 0, "");
240 
241   uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);
242 
243   if (log.is_enabled()) {
244     uint init_deps_left2 = compute_init_deps_left();

476     });
477   }
478   if (need_data()) {
479     _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
480     TrainingDataLocker l;
481     TrainingDataLocker::snapshot();
482 
483     ResourceMark rm;
484     Visitor visitor(training_data_set()->size());
485     training_data_set()->iterate([&](TrainingData* td) {
486       td->prepare(visitor);
487       if (!td->is_CompileTrainingData()) {
488         _dumptime_training_data_dictionary->append(td);
489       }
490     });
491 
492     if (AOTVerifyTrainingData) {
493       training_data_set()->verify();
494     }
495   }


496 }
497 
498 void TrainingData::iterate_roots(MetaspaceClosure* it) {
499   if (_dumptime_training_data_dictionary != nullptr) {
500     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
501       _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
502     }
503   }

504 }
505 
506 void TrainingData::dump_training_data() {
507   if (_dumptime_training_data_dictionary != nullptr) {
508     CompactHashtableStats stats;
509     _archived_training_data_dictionary_for_dumping.reset();
510     CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
511     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
512       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
513 #ifdef ASSERT
514       for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
515         TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
516         assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
517       }
518 #endif // ASSERT
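       // Record the buffered (to-be-archived) copy in the compact hashtable,
       // keyed by the CDS hash of its key and stored as an offset into the archive buffer.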
519       td = ArchiveBuilder::current()->get_buffered_addr(td);
520       uint hash = TrainingData::Key::cds_hash(td->key());
521       u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
522       writer.add(hash, delta);
523     }

530     ResourceMark rm;
531     Visitor visitor(_dumptime_training_data_dictionary->length());
532     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
533       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
534       td->cleanup(visitor);
535     }
536     // Throw away all elements with empty keys
537     int j = 0;
538     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
539       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
540       if (td->key()->is_empty()) {
541         continue;
542       }
543       if (i != j) { // no need to copy if it's the same
544         _dumptime_training_data_dictionary->at_put(j, td);
545       }
546       j++;
547     }
548     _dumptime_training_data_dictionary->trunc_to(j);
549   }

550 }
551 
552 void KlassTrainingData::cleanup(Visitor& visitor) {
553   if (visitor.is_visited(this)) {
554     return;
555   }
556   visitor.visit(this);
557   if (has_holder()) {
558     bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
559     if (is_excluded) {
560       ResourceMark rm;
561       log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
562       _holder = nullptr;
563       key()->make_empty();
564     }
565   }
566   for (int i = 0; i < _comp_deps.length(); i++) {
567     _comp_deps.at(i)->cleanup(visitor);
568   }
569 }

635       ktd->print_on(tty); tty->cr();
636     }
637     guarantee(ktd->_comp_deps.contains(this), "");
638   }
639 }
640 
641 void CompileTrainingData::cleanup(Visitor& visitor) {
642   if (visitor.is_visited(this)) {
643     return;
644   }
645   visitor.visit(this);
646   method()->cleanup(visitor);
647 }
648 
649 void TrainingData::serialize(SerializeClosure* soc) {
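       // When dumping, write out the header of the dictionary assembled for the archive;
       // when reading, restore the header of the runtime dictionary.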
650   if (soc->writing()) {
651     _archived_training_data_dictionary_for_dumping.serialize_header(soc);
652   } else {
653     _archived_training_data_dictionary.serialize_header(soc);
654   }

655 }
656 
657 class TrainingDataPrinter : StackObj {
658   outputStream* _st;
659   int _index;
660 public:
661   TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
662   void do_value(TrainingData* td) {
663     const char* type = (td->is_KlassTrainingData()   ? "K" :
664                         td->is_MethodTrainingData()  ? "M" :
665                         td->is_CompileTrainingData() ? "C" : "?");
666     _st->print("%4d: %p %s ", _index++, td, type);
667     td->print_on(_st);
668     _st->cr();
669     if (td->is_KlassTrainingData()) {
670       td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
671         ResourceMark rm;
672         _st->print_raw("  C ");
673         ctd->print_on(_st);
674         _st->cr();
675       });
676     } else if (td->is_MethodTrainingData()) {
677       td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
678         ResourceMark rm;
679         _st->print_raw("  C ");
680         ctd->print_on(_st);
681         _st->cr();
682       });
683     } else if (td->is_CompileTrainingData()) {
684       // ?
685     }
686   }
687 };
688 
689 void TrainingData::print_archived_training_data_on(outputStream* st) {
690   st->print_cr("Archived TrainingData Dictionary");
691   TrainingDataPrinter tdp(st);
692   TrainingDataLocker::initialize();
693   _archived_training_data_dictionary.iterate(&tdp);

694 }
695 
 696 void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure* iter) {
697   iter->push(const_cast<Metadata**>(&_meta));
698 }
699 
700 void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
701   _key.metaspace_pointers_do(iter);
702 }
703 
704 bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
705   return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
706 }
707 
708 uint TrainingData::Key::cds_hash(const Key* const& k) {
709   return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
710 }
711 
712 TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
713   // For this to work, all components of the key must be in shared metaspace.

 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "ci/ciEnv.hpp"
 26 #include "ci/ciMetadata.hpp"
 27 #include "cds/cdsConfig.hpp"
 28 #include "cds/metaspaceShared.hpp"
 29 #include "classfile/classLoaderData.hpp"
 30 #include "classfile/compactHashtable.hpp"
 31 #include "classfile/javaClasses.hpp"
 32 #include "classfile/symbolTable.hpp"
 33 #include "classfile/systemDictionaryShared.hpp"
 34 #include "compiler/compileTask.hpp"
 35 #include "memory/metadataFactory.hpp"
 36 #include "memory/metaspaceClosure.hpp"
 37 #include "memory/resourceArea.hpp"
 38 #include "oops/method.hpp"
 39 #include "oops/methodCounters.hpp"
 40 #include "oops/recompilationSchedule.hpp"
 41 #include "oops/trainingData.hpp"
 42 #include "runtime/arguments.hpp"
 43 #include "runtime/javaThread.inline.hpp"
 44 #include "runtime/jniHandles.inline.hpp"
 45 #include "utilities/growableArray.hpp"
 46 
 47 TrainingData::TrainingDataSet TrainingData::_training_data_set(1024, 0x3fffffff);
 48 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary;
 49 TrainingData::TrainingDataDictionary TrainingData::_archived_training_data_dictionary_for_dumping;
 50 TrainingData::DumptimeTrainingDataDictionary* TrainingData::_dumptime_training_data_dictionary = nullptr;
 51 int TrainingData::TrainingDataLocker::_lock_mode;
 52 volatile bool TrainingData::TrainingDataLocker::_snapshot = false;
 53 
 54 MethodTrainingData::MethodTrainingData() {
 55   // Used by cppVtables.cpp only
 56   assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
 57 }
 58 
 59 KlassTrainingData::KlassTrainingData() {
 60   // Used by cppVtables.cpp only
 61   assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
 62 }
 63 
 64 CompileTrainingData::CompileTrainingData() : _level(-1), _compile_id(-1) {
 65   // Used by cppVtables.cpp only
 66   assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
 67 }
 68 
 69 void TrainingData::initialize() {
 70   // this is a nop if training modes are not enabled
 71   if (have_data() || need_data()) {
 72     // The data structures we currently have do not support iterative training, so you cannot replay
 73     // and train at the same time. Going forward, we may want to adjust iteration/search to enable that.
 74     guarantee(have_data() != need_data(), "Iterative training is not supported");
 75     TrainingDataLocker::initialize();
 76   }
 77   RecompilationSchedule::initialize();
 78 }
 79 
 80 static void verify_archived_entry(TrainingData* td, const TrainingData::Key* k) {
 81   guarantee(TrainingData::Key::can_compute_cds_hash(k), "");
 82   TrainingData* td1 = TrainingData::lookup_archived_training_data(k);
 83   guarantee(td == td1, "");
 84 }
 85 
 86 void TrainingData::verify() {
 87   if (TrainingData::have_data()) {
 88     archived_training_data_dictionary()->iterate([&](TrainingData* td) {
 89       if (td->is_KlassTrainingData()) {
 90         KlassTrainingData* ktd = td->as_KlassTrainingData();
 91         if (ktd->has_holder() && ktd->holder()->is_loaded()) {
 92           Key k(ktd->holder());
 93           verify_archived_entry(td, &k);
 94         }
 95         ktd->verify();
 96       } else if (td->is_MethodTrainingData()) {
 97         MethodTrainingData* mtd = td->as_MethodTrainingData();

205   if (m->method_holder() == nullptr) {
206     return nullptr; // do not record (dynamically generated method)
207   }
208   MethodTrainingData* mtd = MethodTrainingData::make(m);
209   if (mtd == nullptr) {
210     return nullptr; // allocation failure
211   }
212   mtd->notice_compilation(level);
213 
214   TrainingDataLocker l;
215   CompileTrainingData* ctd = CompileTrainingData::allocate(mtd, level, compile_id);
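       // Keep only the most recent top-level compilation record per tier: a newer
       // compile_id replaces the previous record and clears its init deps.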
216   if (ctd != nullptr) {
217     CompileTrainingData*& last_ctd = mtd->_last_toplevel_compiles[level - 1];
218     if (last_ctd != nullptr) {
219       assert(mtd->highest_top_level() >= level, "consistency");
220       if (last_ctd->compile_id() < compile_id) {
221         last_ctd->clear_init_deps();
222         last_ctd = ctd;
223       }
224     } else {
225       last_ctd = ctd;
226       mtd->notice_toplevel_compilation(level);
227     }
228   }
229   return ctd;
230 }
231 
232 
233 void CompileTrainingData::dec_init_deps_left(KlassTrainingData* ktd) {
234   LogStreamHandle(Trace, training) log;
235   if (log.is_enabled()) {
236     log.print("CTD "); print_on(&log); log.cr();
237     log.print("KTD "); ktd->print_on(&log); log.cr();
238   }
 239   assert(ktd != nullptr && ktd->has_holder(), "");
240   assert(_init_deps.contains(ktd), "");
241   assert(_init_deps_left > 0, "");
242 
243   uint init_deps_left1 = Atomic::sub(&_init_deps_left, 1);
244 
245   if (log.is_enabled()) {
246     uint init_deps_left2 = compute_init_deps_left();

478     });
479   }
480   if (need_data()) {
481     _dumptime_training_data_dictionary = new DumptimeTrainingDataDictionary();
482     TrainingDataLocker l;
483     TrainingDataLocker::snapshot();
484 
485     ResourceMark rm;
486     Visitor visitor(training_data_set()->size());
487     training_data_set()->iterate([&](TrainingData* td) {
488       td->prepare(visitor);
489       if (!td->is_CompileTrainingData()) {
490         _dumptime_training_data_dictionary->append(td);
491       }
492     });
493 
494     if (AOTVerifyTrainingData) {
495       training_data_set()->verify();
496     }
497   }
498 
499   RecompilationSchedule::prepare(CHECK);
500 }
501 
502 void TrainingData::iterate_roots(MetaspaceClosure* it) {
503   if (_dumptime_training_data_dictionary != nullptr) {
504     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
505       _dumptime_training_data_dictionary->at(i).metaspace_pointers_do(it);
506     }
507   }
508   RecompilationSchedule::iterate_roots(it);
509 }
510 
511 void TrainingData::dump_training_data() {
512   if (_dumptime_training_data_dictionary != nullptr) {
513     CompactHashtableStats stats;
514     _archived_training_data_dictionary_for_dumping.reset();
515     CompactHashtableWriter writer(_dumptime_training_data_dictionary->length(), &stats);
516     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
517       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
518 #ifdef ASSERT
519       for (int j = i+1; j < _dumptime_training_data_dictionary->length(); j++) {
520         TrainingData* td1 = _dumptime_training_data_dictionary->at(j).training_data();
521         assert(!TrainingData::Key::equals(td1, td->key(), -1), "conflict");
522       }
523 #endif // ASSERT
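       // Record the buffered (to-be-archived) copy in the compact hashtable,
       // keyed by the CDS hash of its key and stored as an offset into the archive buffer.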
524       td = ArchiveBuilder::current()->get_buffered_addr(td);
525       uint hash = TrainingData::Key::cds_hash(td->key());
526       u4 delta = ArchiveBuilder::current()->buffer_to_offset_u4((address)td);
527       writer.add(hash, delta);
528     }

535     ResourceMark rm;
536     Visitor visitor(_dumptime_training_data_dictionary->length());
537     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
538       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
539       td->cleanup(visitor);
540     }
541     // Throw away all elements with empty keys
542     int j = 0;
543     for (int i = 0; i < _dumptime_training_data_dictionary->length(); i++) {
544       TrainingData* td = _dumptime_training_data_dictionary->at(i).training_data();
545       if (td->key()->is_empty()) {
546         continue;
547       }
548       if (i != j) { // no need to copy if it's the same
549         _dumptime_training_data_dictionary->at_put(j, td);
550       }
551       j++;
552     }
553     _dumptime_training_data_dictionary->trunc_to(j);
554   }
555   RecompilationSchedule::cleanup();
556 }
557 
558 void KlassTrainingData::cleanup(Visitor& visitor) {
559   if (visitor.is_visited(this)) {
560     return;
561   }
562   visitor.visit(this);
563   if (has_holder()) {
564     bool is_excluded = !holder()->is_loaded() || SystemDictionaryShared::check_for_exclusion(holder(), nullptr);
565     if (is_excluded) {
566       ResourceMark rm;
567       log_debug(aot, training)("Cleanup KTD %s", name()->as_klass_external_name());
568       _holder = nullptr;
569       key()->make_empty();
570     }
571   }
572   for (int i = 0; i < _comp_deps.length(); i++) {
573     _comp_deps.at(i)->cleanup(visitor);
574   }
575 }

641       ktd->print_on(tty); tty->cr();
642     }
643     guarantee(ktd->_comp_deps.contains(this), "");
644   }
645 }
646 
647 void CompileTrainingData::cleanup(Visitor& visitor) {
648   if (visitor.is_visited(this)) {
649     return;
650   }
651   visitor.visit(this);
652   method()->cleanup(visitor);
653 }
654 
655 void TrainingData::serialize(SerializeClosure* soc) {
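       // When dumping, write out the header of the dictionary assembled for the archive;
       // when reading, restore the header of the runtime dictionary.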
656   if (soc->writing()) {
657     _archived_training_data_dictionary_for_dumping.serialize_header(soc);
658   } else {
659     _archived_training_data_dictionary.serialize_header(soc);
660   }
661   RecompilationSchedule::serialize(soc);
662 }
663 
664 class TrainingDataPrinter : StackObj {
665   outputStream* _st;
666   int _index;
667 public:
668   TrainingDataPrinter(outputStream* st) : _st(st), _index(0) {}
669   void do_value(TrainingData* td) {
670     const char* type = (td->is_KlassTrainingData()   ? "K" :
671                         td->is_MethodTrainingData()  ? "M" :
672                         td->is_CompileTrainingData() ? "C" : "?");
673     _st->print("%4d: %p %s ", _index++, td, type);
674     td->print_on(_st);
675     _st->cr();
676     if (td->is_KlassTrainingData()) {
677       td->as_KlassTrainingData()->iterate_comp_deps([&](CompileTrainingData* ctd) {
678         ResourceMark rm;
679         _st->print_raw("  C ");
680         ctd->print_on(_st);
681         _st->cr();
682       });
683     } else if (td->is_MethodTrainingData()) {
684       td->as_MethodTrainingData()->iterate_compiles([&](CompileTrainingData* ctd) {
685         ResourceMark rm;
686         _st->print_raw("  C ");
687         ctd->print_on(_st);
688         _st->cr();
689       });
690     } else if (td->is_CompileTrainingData()) {
691       // ?
692     }
693   }
694 };
695 
696 void TrainingData::print_archived_training_data_on(outputStream* st) {
697   st->print_cr("Archived TrainingData Dictionary");
698   TrainingDataPrinter tdp(st);
699   TrainingDataLocker::initialize();
700   _archived_training_data_dictionary.iterate(&tdp);
701   RecompilationSchedule::print_archived_training_data_on(st);
702 }
703 
 704 void TrainingData::Key::metaspace_pointers_do(MetaspaceClosure* iter) {
705   iter->push(const_cast<Metadata**>(&_meta));
706 }
707 
708 void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
709   _key.metaspace_pointers_do(iter);
710 }
711 
712 bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
713   return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
714 }
715 
716 uint TrainingData::Key::cds_hash(const Key* const& k) {
717   return SystemDictionaryShared::hash_for_shared_dictionary((address)k->meta());
718 }
719 
720 TrainingData* TrainingData::lookup_archived_training_data(const Key* k) {
721   // For this to work, all components of the key must be in shared metaspace.