< prev index next >

src/hotspot/share/classfile/classLoaderData.cpp

Print this page

  49 #include "cds/heapShared.hpp"
  50 #include "classfile/classLoaderData.inline.hpp"
  51 #include "classfile/classLoaderDataGraph.inline.hpp"
  52 #include "classfile/dictionary.hpp"
  53 #include "classfile/javaClasses.inline.hpp"
  54 #include "classfile/moduleEntry.hpp"
  55 #include "classfile/packageEntry.hpp"
  56 #include "classfile/symbolTable.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "classfile/systemDictionaryShared.hpp"
  59 #include "classfile/vmClasses.hpp"
  60 #include "logging/log.hpp"
  61 #include "logging/logStream.hpp"
  62 #include "memory/allocation.inline.hpp"
  63 #include "memory/classLoaderMetaspace.hpp"
  64 #include "memory/metadataFactory.hpp"
  65 #include "memory/metaspace.hpp"
  66 #include "memory/resourceArea.hpp"
  67 #include "memory/universe.hpp"
  68 #include "oops/access.inline.hpp"

  69 #include "oops/jmethodIDTable.hpp"
  70 #include "oops/klass.inline.hpp"
  71 #include "oops/oop.inline.hpp"
  72 #include "oops/oopHandle.inline.hpp"
  73 #include "oops/verifyOopClosure.hpp"
  74 #include "oops/weakHandle.inline.hpp"
  75 #include "runtime/arguments.hpp"
  76 #include "runtime/atomicAccess.hpp"
  77 #include "runtime/handles.inline.hpp"
  78 #include "runtime/mutex.hpp"
  79 #include "runtime/safepoint.hpp"
  80 #include "utilities/growableArray.hpp"
  81 #include "utilities/macros.hpp"
  82 #include "utilities/ostream.hpp"
  83 
  84 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = nullptr;
  85 
  86 void ClassLoaderData::init_null_class_loader_data() {
  87   assert(_the_null_class_loader_data == nullptr, "cannot initialize twice");
  88   assert(ClassLoaderDataGraph::_head == nullptr, "cannot initialize twice");

 428 
 429 #ifdef ASSERT
 430     oop m = k->java_mirror();
 431     assert(m != nullptr, "nullptr mirror");
 432     assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
 433 #endif
 434     klass_closure->do_klass(k);
 435   }
 436 }
 437 
     // Apply f to every InstanceKlass on this loader's _klasses list.
     // Non-instance Klass entries on the list are skipped.
 438 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 439   // Lock-free access requires load_acquire
 440   for (Klass* k = AtomicAccess::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
 441     if (k->is_instance_klass()) {
 442       f(InstanceKlass::cast(k));
 443     }
         // Debug-only sanity check that the singly-linked list has no self-loop.
 444     assert(k != k->next_link(), "no loops!");
 445   }
 446 }
 447 










     // Apply f to every ModuleEntry owned by this class loader data.
     // Caller must hold Module_lock or be at a safepoint (asserted below).
 448 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 449   assert_locked_or_safepoint(Module_lock);
         // The unnamed module is stored separately from the _modules table,
         // so it is visited explicitly first.
 450   if (_unnamed_module != nullptr) {
 451     f(_unnamed_module);
 452   }
         // _modules is created lazily and may still be null.
 453   if (_modules != nullptr) {
 454     _modules->modules_do(f);
 455   }
 456 }
 457 
     // Apply f to every PackageEntry owned by this class loader data.
     // Caller must hold Module_lock or be at a safepoint (asserted below).
 458 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 459   assert_locked_or_safepoint(Module_lock);
         // _packages is created lazily and may still be null.
 460   if (_packages != nullptr) {
 461     _packages->packages_do(f);
 462   }
 463 }
 464 
 465 void ClassLoaderData::record_dependency(const Klass* k) {
 466   assert(k != nullptr, "invariant");
 467 

 606   delete _jmethod_ids;
 607   _jmethod_ids = nullptr;
 608 }
 609 
     // Begin unloading this class loader data: flag it as unloading, free
     // C-heap structures for deallocate-list entries, notify each
     // InstanceKlass that it is being unloaded, and release jmethodIDs.
 610 void ClassLoaderData::unload() {
       // Mark this CLD as unloading before any cleanup work starts.
 611   _unloading = true;
 612 
       // Trace-level logging of which CLD is being unloaded (UL tag set
       // class+loader+data); guarded so work is done only when enabled.
 613   LogTarget(Trace, class, loader, data) lt;
 614   if (lt.is_enabled()) {
 615     ResourceMark rm;
 616     LogStream ls(lt);
 617     ls.print("unload");
 618     print_value_on(&ls);
 619     ls.cr();
 620   }
 621 
 622   // Some items on the _deallocate_list need to free their C heap structures
 623   // if they are not already on the _klasses list.
 624   free_deallocate_list_C_heap_structures();
 625 


 626   // Clean up class dependencies and tell serviceability tools
 627   // these classes are unloading.  This must be called
 628   // after erroneous classes are released.
 629   classes_do(InstanceKlass::unload_class);
 630 
       // Drop the jmethodID table, if one was ever allocated for this CLD.
 631   if (_jmethod_ids != nullptr) {
 632     remove_jmethod_ids();
 633   }
 634 }
 635 
 636 ModuleEntryTable* ClassLoaderData::modules() {
 637   // Lazily create the module entry table at first request.
 638   // Lock-free access requires load_acquire.
 639   ModuleEntryTable* modules = AtomicAccess::load_acquire(&_modules);
 640   if (modules == nullptr) {
 641     MutexLocker m1(Module_lock);
 642     // Check if _modules got allocated while we were waiting for this lock.
 643     if ((modules = _modules) == nullptr) {
 644       modules = new ModuleEntryTable();
 645 

 886   // This must be called at a safepoint because it depends on metadata walking at
 887   // safepoint cleanup time.
 888   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 889   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 890   if (_deallocate_list == nullptr) {
 891     return;
 892   }
 893   // Go backwards because this removes entries that are freed.
 894   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 895     Metadata* m = _deallocate_list->at(i);
 896     if (!m->on_stack()) {
 897       _deallocate_list->remove_at(i);
 898       // There are only three types of metadata that we deallocate directly.
 899       // Cast them so they can be used by the template function.
 900       if (m->is_method()) {
 901         MetadataFactory::free_metadata(this, (Method*)m);
 902       } else if (m->is_constantPool()) {
 903         HeapShared::remove_scratch_resolved_references((ConstantPool*)m);
 904         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 905       } else if (m->is_klass()) {
 906         MetadataFactory::free_metadata(this, (InstanceKlass*)m);




 907       } else {
 908         ShouldNotReachHere();
 909       }
 910     } else {
 911       // Metadata is alive.
 912       // If scratch_class is on stack then it shouldn't be on this list!
 913       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 914              "scratch classes on this list should be dead");
 915       // Also should assert that other metadata on the list was found in handles.
 916       // Some cleaning remains.
 917       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 918     }
 919   }
 920 }
 921 
 922 // This is distinct from free_deallocate_list.  For class loader data that are
 923 // unloading, this frees the C heap memory for items on the list, and unlinks
 924 // scratch or error classes so that unloading events aren't triggered for these
 925 // classes. The metadata is removed with the unloading metaspace.
 926 // There isn't C heap memory allocated for methods, so nothing is done for them.

  49 #include "cds/heapShared.hpp"
  50 #include "classfile/classLoaderData.inline.hpp"
  51 #include "classfile/classLoaderDataGraph.inline.hpp"
  52 #include "classfile/dictionary.hpp"
  53 #include "classfile/javaClasses.inline.hpp"
  54 #include "classfile/moduleEntry.hpp"
  55 #include "classfile/packageEntry.hpp"
  56 #include "classfile/symbolTable.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "classfile/systemDictionaryShared.hpp"
  59 #include "classfile/vmClasses.hpp"
  60 #include "logging/log.hpp"
  61 #include "logging/logStream.hpp"
  62 #include "memory/allocation.inline.hpp"
  63 #include "memory/classLoaderMetaspace.hpp"
  64 #include "memory/metadataFactory.hpp"
  65 #include "memory/metaspace.hpp"
  66 #include "memory/resourceArea.hpp"
  67 #include "memory/universe.hpp"
  68 #include "oops/access.inline.hpp"
  69 #include "oops/inlineKlass.inline.hpp"
  70 #include "oops/jmethodIDTable.hpp"
  71 #include "oops/klass.inline.hpp"
  72 #include "oops/oop.inline.hpp"
  73 #include "oops/oopHandle.inline.hpp"
  74 #include "oops/verifyOopClosure.hpp"
  75 #include "oops/weakHandle.inline.hpp"
  76 #include "runtime/arguments.hpp"
  77 #include "runtime/atomicAccess.hpp"
  78 #include "runtime/handles.inline.hpp"
  79 #include "runtime/mutex.hpp"
  80 #include "runtime/safepoint.hpp"
  81 #include "utilities/growableArray.hpp"
  82 #include "utilities/macros.hpp"
  83 #include "utilities/ostream.hpp"
  84 
  85 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = nullptr;
  86 
  87 void ClassLoaderData::init_null_class_loader_data() {
  88   assert(_the_null_class_loader_data == nullptr, "cannot initialize twice");
  89   assert(ClassLoaderDataGraph::_head == nullptr, "cannot initialize twice");

 429 
 430 #ifdef ASSERT
 431     oop m = k->java_mirror();
 432     assert(m != nullptr, "nullptr mirror");
 433     assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
 434 #endif
 435     klass_closure->do_klass(k);
 436   }
 437 }
 438 
     // Apply f to every InstanceKlass on this loader's _klasses list.
     // Non-instance Klass entries on the list are skipped.
 439 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 440   // Lock-free access requires load_acquire
 441   for (Klass* k = AtomicAccess::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
 442     if (k->is_instance_klass()) {
 443       f(InstanceKlass::cast(k));
 444     }
         // Debug-only sanity check that the singly-linked list has no self-loop.
 445     assert(k != k->next_link(), "no loops!");
 446   }
 447 }
 448 
     // Apply f to every InlineKlass (value class) on this loader's _klasses
     // list; all other Klass kinds are skipped.  Mirrors classes_do above.
 449 void ClassLoaderData::inline_classes_do(void f(InlineKlass*)) {
 450   // Lock-free access requires load_acquire
 451   for (Klass* k = AtomicAccess::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
 452     if (k->is_inline_klass()) {
 453       f(InlineKlass::cast(k));
 454     }
         // Debug-only sanity check that the singly-linked list has no self-loop.
 455     assert(k != k->next_link(), "no loops!");
 456   }
 457 }
 458 
     // Apply f to every ModuleEntry owned by this class loader data.
     // Caller must hold Module_lock or be at a safepoint (asserted below).
 459 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 460   assert_locked_or_safepoint(Module_lock);
         // The unnamed module is stored separately from the _modules table,
         // so it is visited explicitly first.
 461   if (_unnamed_module != nullptr) {
 462     f(_unnamed_module);
 463   }
         // _modules is created lazily and may still be null.
 464   if (_modules != nullptr) {
 465     _modules->modules_do(f);
 466   }
 467 }
 468 
     // Apply f to every PackageEntry owned by this class loader data.
     // Caller must hold Module_lock or be at a safepoint (asserted below).
 469 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 470   assert_locked_or_safepoint(Module_lock);
         // _packages is created lazily and may still be null.
 471   if (_packages != nullptr) {
 472     _packages->packages_do(f);
 473   }
 474 }
 475 
 476 void ClassLoaderData::record_dependency(const Klass* k) {
 477   assert(k != nullptr, "invariant");
 478 

 617   delete _jmethod_ids;
 618   _jmethod_ids = nullptr;
 619 }
 620 
     // Begin unloading this class loader data: flag it as unloading, free
     // C-heap structures for deallocate-list entries, let inline (value)
     // classes clean up, notify each InstanceKlass that it is being
     // unloaded, and release jmethodIDs.
 621 void ClassLoaderData::unload() {
       // Mark this CLD as unloading before any cleanup work starts.
 622   _unloading = true;
 623 
       // Trace-level logging of which CLD is being unloaded (UL tag set
       // class+loader+data); guarded so work is done only when enabled.
 624   LogTarget(Trace, class, loader, data) lt;
 625   if (lt.is_enabled()) {
 626     ResourceMark rm;
 627     LogStream ls(lt);
 628     ls.print("unload");
 629     print_value_on(&ls);
 630     ls.cr();
 631   }
 632 
 633   // Some items on the _deallocate_list need to free their C heap structures
 634   // if they are not already on the _klasses list.
 635   free_deallocate_list_C_heap_structures();
 636 
       // Give each InlineKlass a chance to release its resources before the
       // classes are unloaded.  NOTE(review): confirm exactly what
       // InlineKlass::cleanup frees; it is not visible from this file.
 637   inline_classes_do(InlineKlass::cleanup);
 638 
 639   // Clean up class dependencies and tell serviceability tools
 640   // these classes are unloading.  This must be called
 641   // after erroneous classes are released.
 642   classes_do(InstanceKlass::unload_class);
 643 
       // Drop the jmethodID table, if one was ever allocated for this CLD.
 644   if (_jmethod_ids != nullptr) {
 645     remove_jmethod_ids();
 646   }
 647 }
 648 
 649 ModuleEntryTable* ClassLoaderData::modules() {
 650   // Lazily create the module entry table at first request.
 651   // Lock-free access requires load_acquire.
 652   ModuleEntryTable* modules = AtomicAccess::load_acquire(&_modules);
 653   if (modules == nullptr) {
 654     MutexLocker m1(Module_lock);
 655     // Check if _modules got allocated while we were waiting for this lock.
 656     if ((modules = _modules) == nullptr) {
 657       modules = new ModuleEntryTable();
 658 

 899   // This must be called at a safepoint because it depends on metadata walking at
 900   // safepoint cleanup time.
 901   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 902   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 903   if (_deallocate_list == nullptr) {
 904     return;
 905   }
 906   // Go backwards because this removes entries that are freed.
 907   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 908     Metadata* m = _deallocate_list->at(i);
 909     if (!m->on_stack()) {
 910       _deallocate_list->remove_at(i);
 911       // There are only three types of metadata that we deallocate directly.
 912       // Cast them so they can be used by the template function.
 913       if (m->is_method()) {
 914         MetadataFactory::free_metadata(this, (Method*)m);
 915       } else if (m->is_constantPool()) {
 916         HeapShared::remove_scratch_resolved_references((ConstantPool*)m);
 917         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 918       } else if (m->is_klass()) {
 919         if (!((Klass*)m)->is_inline_klass()) {
 920           MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 921         } else {
 922           MetadataFactory::free_metadata(this, (InlineKlass*)m);
 923         }
 924       } else {
 925         ShouldNotReachHere();
 926       }
 927     } else {
 928       // Metadata is alive.
 929       // If scratch_class is on stack then it shouldn't be on this list!
 930       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 931              "scratch classes on this list should be dead");
 932       // Also should assert that other metadata on the list was found in handles.
 933       // Some cleaning remains.
 934       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 935     }
 936   }
 937 }
 938 
 939 // This is distinct from free_deallocate_list.  For class loader data that are
 940 // unloading, this frees the C heap memory for items on the list, and unlinks
 941 // scratch or error classes so that unloading events aren't triggered for these
 942 // classes. The metadata is removed with the unloading metaspace.
 943 // There isn't C heap memory allocated for methods, so nothing is done for them.
< prev index next >