
src/hotspot/share/classfile/classLoaderData.cpp

--- old/src/hotspot/share/classfile/classLoaderData.cpp

  46 // The bootstrap loader (represented by NULL) also has a ClassLoaderData,
  47 // the singleton class the_null_class_loader_data().
  48 
  49 #include "precompiled.hpp"
  50 #include "classfile/classLoaderData.inline.hpp"
  51 #include "classfile/classLoaderDataGraph.inline.hpp"
  52 #include "classfile/dictionary.hpp"
  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/moduleEntry.hpp"
  55 #include "classfile/packageEntry.hpp"
  56 #include "classfile/symbolTable.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "logging/log.hpp"
  59 #include "logging/logStream.hpp"
  60 #include "memory/allocation.inline.hpp"
  61 #include "memory/metadataFactory.hpp"
  62 #include "memory/resourceArea.hpp"
  63 #include "oops/access.inline.hpp"
  64 #include "oops/oop.inline.hpp"
  65 #include "oops/oopHandle.inline.hpp"
  66 #include "oops/weakHandle.inline.hpp"
  67 #include "runtime/atomic.hpp"
  68 #include "runtime/handles.inline.hpp"
  69 #include "runtime/mutex.hpp"
  70 #include "runtime/orderAccess.hpp"
  71 #include "runtime/safepoint.hpp"
  72 #include "utilities/growableArray.hpp"
  73 #include "utilities/macros.hpp"
  74 #include "utilities/ostream.hpp"
  75 
  76 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  77 
  78 void ClassLoaderData::init_null_class_loader_data() {
  79   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  80   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
  81 
  82   _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  83   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  84   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  85 


 344 #ifdef ASSERT
 345       oop m = k->java_mirror();
 346       assert(m != NULL, "NULL mirror");
 347       assert(m->is_a(SystemDictionary::Class_klass()), "invalid mirror");
 348 #endif
 349       klass_closure->do_klass(k);
 350     }
 351   }
 352 }
 353 
 354 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 355   // Lock-free access requires load_acquire
 356   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 357     if (k->is_instance_klass()) {
 358       f(InstanceKlass::cast(k));
 359     }
 360     assert(k != k->next_link(), "no loops!");
 361   }
 362 }
 363 
 364 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 365   assert_locked_or_safepoint(Module_lock);
 366   if (_unnamed_module != NULL) {
 367     f(_unnamed_module);
 368   }
 369   if (_modules != NULL) {
 370     for (int i = 0; i < _modules->table_size(); i++) {
 371       for (ModuleEntry* entry = _modules->bucket(i);
 372            entry != NULL;
 373            entry = entry->next()) {
 374         f(entry);
 375       }
 376     }
 377   }
 378 }
 379 
 380 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 381   assert_locked_or_safepoint(Module_lock);
 382   if (_packages != NULL) {
 383     for (int i = 0; i < _packages->table_size(); i++) {


 510   }
 511   ShouldNotReachHere();   // should have found this class!!
 512 }
 513 
 514 void ClassLoaderData::unload() {
 515   _unloading = true;
 516 
 517   LogTarget(Trace, class, loader, data) lt;
 518   if (lt.is_enabled()) {
 519     ResourceMark rm;
 520     LogStream ls(lt);
 521     ls.print("unload");
 522     print_value_on(&ls);
 523     ls.cr();
 524   }
 525 
 526   // Some items on the _deallocate_list need to free their C heap structures
 527   // if they are not already on the _klasses list.
 528   free_deallocate_list_C_heap_structures();
 529 
 530   // Clean up class dependencies and tell serviceability tools
 531   // these classes are unloading.  Must be called
 532   // after erroneous classes are released.
 533   classes_do(InstanceKlass::unload_class);
 534 
 535   // Clean up global class iterator for compiler
 536   ClassLoaderDataGraph::adjust_saved_class(this);
 537 }
 538 
 539 ModuleEntryTable* ClassLoaderData::modules() {
 540   // Lazily create the module entry table at first request.
 541   // Lock-free access requires load_acquire.
 542   ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
 543   if (modules == NULL) {
 544     MutexLocker m1(Module_lock);
 545     // Check if _modules got allocated while we were waiting for this lock.
 546     if ((modules = _modules) == NULL) {
 547       modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
 548 
 549       {


 804 void ClassLoaderData::free_deallocate_list() {
 805   // This must be called at a safepoint because it depends on metadata walking at
 806   // safepoint cleanup time.
 807   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 808   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 809   if (_deallocate_list == NULL) {
 810     return;
 811   }
 812   // Go backwards because this removes entries that are freed.
 813   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 814     Metadata* m = _deallocate_list->at(i);
 815     if (!m->on_stack()) {
 816       _deallocate_list->remove_at(i);
 817       // There are only three types of metadata that we deallocate directly.
 818       // Cast them so they can be used by the template function.
 819       if (m->is_method()) {
 820         MetadataFactory::free_metadata(this, (Method*)m);
 821       } else if (m->is_constantPool()) {
 822         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 823       } else if (m->is_klass()) {
 824         MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 825       } else {
 826         ShouldNotReachHere();
 827       }
 828     } else {
 829       // Metadata is alive.
 830       // If scratch_class is on stack then it shouldn't be on this list!
 831       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 832              "scratch classes on this list should be dead");
 833       // Also should assert that other metadata on the list was found in handles.
 834       // Some cleaning remains.
 835       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 836     }
 837   }
 838 }
 839 
 840 // This is distinct from free_deallocate_list.  For class loader data that are
 841 // unloading, this frees the C heap memory for items on the list, and unlinks
 842 // scratch or error classes so that unloading events aren't triggered for these
 843 // classes. The metadata is removed with the unloading metaspace.
 844 // There isn't C heap memory allocated for methods, so nothing is done for them.

+++ new/src/hotspot/share/classfile/classLoaderData.cpp

  46 // The bootstrap loader (represented by NULL) also has a ClassLoaderData,
  47 // the singleton class the_null_class_loader_data().
  48 
  49 #include "precompiled.hpp"
  50 #include "classfile/classLoaderData.inline.hpp"
  51 #include "classfile/classLoaderDataGraph.inline.hpp"
  52 #include "classfile/dictionary.hpp"
  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/moduleEntry.hpp"
  55 #include "classfile/packageEntry.hpp"
  56 #include "classfile/symbolTable.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "logging/log.hpp"
  59 #include "logging/logStream.hpp"
  60 #include "memory/allocation.inline.hpp"
  61 #include "memory/metadataFactory.hpp"
  62 #include "memory/resourceArea.hpp"
  63 #include "oops/access.inline.hpp"
  64 #include "oops/oop.inline.hpp"
  65 #include "oops/oopHandle.inline.hpp"
  66 #include "oops/valueKlass.hpp"
  67 #include "oops/weakHandle.inline.hpp"
  68 #include "runtime/atomic.hpp"
  69 #include "runtime/handles.inline.hpp"
  70 #include "runtime/mutex.hpp"
  71 #include "runtime/orderAccess.hpp"
  72 #include "runtime/safepoint.hpp"
  73 #include "utilities/growableArray.hpp"
  74 #include "utilities/macros.hpp"
  75 #include "utilities/ostream.hpp"
  76 
  77 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  78 
  79 void ClassLoaderData::init_null_class_loader_data() {
  80   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  81   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
  82 
  83   _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  84   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  85   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  86 


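The boot class loader is represented by NULL on the Java side, but it still needs a ClassLoaderData to own the metadata it loads; init_null_class_loader_data() above creates that singleton exactly once during VM startup and makes it the head of the ClassLoaderDataGraph. A minimal standalone sketch of the same initialize-once pattern (LoaderData and its members are hypothetical stand-ins, not the real HotSpot types):

    #include <cassert>

    // Hypothetical stand-in for ClassLoaderData and its boot-loader singleton.
    struct LoaderData {
      static LoaderData* _the_null_loader_data;

      bool is_the_null_loader_data() const { return this == _the_null_loader_data; }

      static void init_null_loader_data() {
        // VM startup calls this exactly once; the asserts document that invariant.
        assert(_the_null_loader_data == nullptr && "cannot initialize twice");
        _the_null_loader_data = new LoaderData();
        assert(_the_null_loader_data->is_the_null_loader_data() && "Must be");
      }
    };

    LoaderData* LoaderData::_the_null_loader_data = nullptr;

    int main() {
      LoaderData::init_null_loader_data();
      return 0;
    }
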
 345 #ifdef ASSERT
 346       oop m = k->java_mirror();
 347       assert(m != NULL, "NULL mirror");
 348       assert(m->is_a(SystemDictionary::Class_klass()), "invalid mirror");
 349 #endif
 350       klass_closure->do_klass(k);
 351     }
 352   }
 353 }
 354 
 355 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 356   // Lock-free access requires load_acquire
 357   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 358     if (k->is_instance_klass()) {
 359       f(InstanceKlass::cast(k));
 360     }
 361     assert(k != k->next_link(), "no loops!");
 362   }
 363 }
 364 
 365 void ClassLoaderData::value_classes_do(void f(ValueKlass*)) {
 366   // Lock-free access requires load_acquire
 367   for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 368     if (k->is_value()) {
 369       f(ValueKlass::cast(k));
 370     }
 371     assert(k != k->next_link(), "no loops!");
 372   }
 373 }
 374 
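classes_do() and the new value_classes_do() walk the _klasses list without taking a lock: the load_acquire of the list head is meant to pair with a release store on the insertion side, so a reader that observes a newly linked Klass also observes its fully initialized fields. A minimal standalone sketch of that acquire/release pairing using std::atomic (Node, prepend and for_each are hypothetical stand-ins, not HotSpot APIs):

    #include <atomic>
    #include <cstdio>

    // Hypothetical stand-ins for Klass and the _klasses list head.
    struct Node {
      int   payload;
      Node* next;   // only written before the node is published
    };

    static std::atomic<Node*> list_head{nullptr};

    // Writer side: initialize the node completely, then publish it with a
    // release store so acquire-loading readers see the initialized fields.
    void prepend(Node* n, int value) {
      n->payload = value;
      n->next = list_head.load(std::memory_order_relaxed);
      list_head.store(n, std::memory_order_release);
    }

    // Reader side, analogous to classes_do(): acquire the head once, then walk.
    void for_each(void (*f)(int)) {
      for (Node* n = list_head.load(std::memory_order_acquire); n != nullptr; n = n->next) {
        f(n->payload);
      }
    }

    int main() {
      static Node a, b;
      prepend(&a, 1);
      prepend(&b, 2);
      for_each([](int v) { std::printf("%d\n", v); });
      return 0;
    }
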
 375 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 376   assert_locked_or_safepoint(Module_lock);
 377   if (_unnamed_module != NULL) {
 378     f(_unnamed_module);
 379   }
 380   if (_modules != NULL) {
 381     for (int i = 0; i < _modules->table_size(); i++) {
 382       for (ModuleEntry* entry = _modules->bucket(i);
 383            entry != NULL;
 384            entry = entry->next()) {
 385         f(entry);
 386       }
 387     }
 388   }
 389 }
 390 
 391 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 392   assert_locked_or_safepoint(Module_lock);
 393   if (_packages != NULL) {
 394     for (int i = 0; i < _packages->table_size(); i++) {


 521   }
 522   ShouldNotReachHere();   // should have found this class!!
 523 }
 524 
 525 void ClassLoaderData::unload() {
 526   _unloading = true;
 527 
 528   LogTarget(Trace, class, loader, data) lt;
 529   if (lt.is_enabled()) {
 530     ResourceMark rm;
 531     LogStream ls(lt);
 532     ls.print("unload");
 533     print_value_on(&ls);
 534     ls.cr();
 535   }
 536 
 537   // Some items on the _deallocate_list need to free their C heap structures
 538   // if they are not already on the _klasses list.
 539   free_deallocate_list_C_heap_structures();
 540 
 541   value_classes_do(ValueKlass::cleanup);
 542 
 543   // Clean up class dependencies and tell serviceability tools
 544   // these classes are unloading.  Must be called
 545   // after erroneous classes are released.
 546   classes_do(InstanceKlass::unload_class);
 547 
 548   // Clean up global class iterator for compiler
 549   ClassLoaderDataGraph::adjust_saved_class(this);
 550 }
 551 
 552 ModuleEntryTable* ClassLoaderData::modules() {
 553   // Lazily create the module entry table at first request.
 554   // Lock-free access requires load_acquire.
 555   ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
 556   if (modules == NULL) {
 557     MutexLocker m1(Module_lock);
 558     // Check if _modules got allocated while we were waiting for this lock.
 559     if ((modules = _modules) == NULL) {
 560       modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
 561 
 562       {

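modules() is the usual double-checked locking idiom: a load_acquire of _modules gives the lock-free fast path, the check is repeated under Module_lock, and presumably the fully constructed table is published with a release store further down, in the lines elided from this hunk. A minimal standalone sketch of the idiom with std::atomic and std::mutex (Table, table_ptr and get_table are hypothetical stand-ins):

    #include <atomic>
    #include <mutex>

    struct Table { /* hypothetical stand-in for ModuleEntryTable */ };

    static std::atomic<Table*> table_ptr{nullptr};  // stand-in for _modules
    static std::mutex          table_lock;          // stand-in for Module_lock

    Table* get_table() {
      // Fast path: an acquire load, so a non-null result implies the table
      // was fully constructed before it was published.
      Table* t = table_ptr.load(std::memory_order_acquire);
      if (t == nullptr) {
        std::lock_guard<std::mutex> guard(table_lock);
        // Re-check: another thread may have created the table while we waited.
        t = table_ptr.load(std::memory_order_relaxed);
        if (t == nullptr) {
          t = new Table();
          // Publish only after construction is complete.
          table_ptr.store(t, std::memory_order_release);
        }
      }
      return t;
    }

    int main() { return get_table() != nullptr ? 0 : 1; }
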

 817 void ClassLoaderData::free_deallocate_list() {
 818   // This must be called at a safepoint because it depends on metadata walking at
 819   // safepoint cleanup time.
 820   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 821   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 822   if (_deallocate_list == NULL) {
 823     return;
 824   }
 825   // Go backwards because this removes entries that are freed.
 826   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 827     Metadata* m = _deallocate_list->at(i);
 828     if (!m->on_stack()) {
 829       _deallocate_list->remove_at(i);
 830       // There are only three types of metadata that we deallocate directly.
 831       // Cast them so they can be used by the template function.
 832       if (m->is_method()) {
 833         MetadataFactory::free_metadata(this, (Method*)m);
 834       } else if (m->is_constantPool()) {
 835         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 836       } else if (m->is_klass()) {
 837         if (!((Klass*)m)->is_value()) {
 838           MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 839         } else {
 840           MetadataFactory::free_metadata(this, (ValueKlass*)m);
 841         }
 842       } else {
 843         ShouldNotReachHere();
 844       }
 845     } else {
 846       // Metadata is alive.
 847       // If scratch_class is on stack then it shouldn't be on this list!
 848       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 849              "scratch classes on this list should be dead");
 850       // Also should assert that other metadata on the list was found in handles.
 851       // Some cleaning remains.
 852       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 853     }
 854   }
 855 }
 856 
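Two details in free_deallocate_list() are worth calling out. The loop runs backwards because remove_at() compacts the GrowableArray, so iterating from the end keeps the indices of the not-yet-visited entries stable; and the new is_value() branch hands free_metadata() the concrete ValueKlass type instead of treating every klass as a plain InstanceKlass, matching the source comment that the casts exist for the template function. A minimal standalone illustration of the backward-removal idiom with std::vector (removable() is a hypothetical stand-in for the !on_stack() test):

    #include <cstdio>
    #include <vector>

    // Hypothetical stand-in for the !on_stack() test.
    static bool removable(int v) { return v % 2 == 0; }

    int main() {
      std::vector<int> list = {1, 2, 3, 4, 5};   // stand-in for _deallocate_list

      // Walk backwards: erasing element i only shifts elements above i,
      // which have already been visited, so no index is skipped.
      for (int i = static_cast<int>(list.size()) - 1; i >= 0; i--) {
        if (removable(list[i])) {
          list.erase(list.begin() + i);          // analogous to remove_at(i)
        }
      }

      for (int v : list) std::printf("%d ", v);  // prints: 1 3 5
      std::printf("\n");
      return 0;
    }
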
 857 // This is distinct from free_deallocate_list.  For class loader data that are
 858 // unloading, this frees the C heap memory for items on the list, and unlinks
 859 // scratch or error classes so that unloading events aren't triggered for these
 860 // classes. The metadata is removed with the unloading metaspace.
 861 // There isn't C heap memory allocated for methods, so nothing is done for them.

