src/hotspot/share/classfile/classLoaderData.cpp (old version)

  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/moduleEntry.hpp"
  55 #include "classfile/packageEntry.hpp"
  56 #include "classfile/symbolTable.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "classfile/systemDictionaryShared.hpp"
  59 #include "classfile/vmClasses.hpp"
  60 #include "logging/log.hpp"
  61 #include "logging/logStream.hpp"
  62 #include "memory/allocation.inline.hpp"
  63 #include "memory/classLoaderMetaspace.hpp"
  64 #include "memory/metadataFactory.hpp"
  65 #include "memory/metaspace.hpp"
  66 #include "memory/resourceArea.hpp"
  67 #include "memory/universe.hpp"
  68 #include "oops/access.inline.hpp"
  69 #include "oops/klass.inline.hpp"
  70 #include "oops/objArrayKlass.hpp"
  71 #include "oops/oop.inline.hpp"
  72 #include "oops/oopHandle.inline.hpp"

  73 #include "oops/weakHandle.inline.hpp"
  74 #include "runtime/arguments.hpp"
  75 #include "runtime/atomic.hpp"
  76 #include "runtime/handles.inline.hpp"
  77 #include "runtime/mutex.hpp"
  78 #include "runtime/safepoint.hpp"
  79 #include "utilities/growableArray.hpp"
  80 #include "utilities/macros.hpp"
  81 #include "utilities/ostream.hpp"
  82 
  83 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  84 
  85 void ClassLoaderData::init_null_class_loader_data() {
  86   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  87   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
  88 
  89   _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  90   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  91   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  92 

 383 
 384 #ifdef ASSERT
 385     oop m = k->java_mirror();
 386     assert(m != NULL, "NULL mirror");
 387     assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
 388 #endif
 389     klass_closure->do_klass(k);
 390   }
 391 }
 392 
 393 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 394   // Lock-free access requires load_acquire
 395   for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 396     if (k->is_instance_klass()) {
 397       f(InstanceKlass::cast(k));
 398     }
 399     assert(k != k->next_link(), "no loops!");
 400   }
 401 }
 402 
 403 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 404   assert_locked_or_safepoint(Module_lock);
 405   if (_unnamed_module != NULL) {
 406     f(_unnamed_module);
 407   }
 408   if (_modules != NULL) {
 409     _modules->modules_do(f);
 410   }
 411 }
 412 
 413 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 414   assert_locked_or_safepoint(Module_lock);
 415   if (_packages != NULL) {
 416     _packages->packages_do(f);
 417   }
 418 }
 419 
 420 void ClassLoaderData::record_dependency(const Klass* k) {
 421   assert(k != NULL, "invariant");
 422 

 537   }
 538   ShouldNotReachHere();   // should have found this class!!
 539 }
 540 
 541 void ClassLoaderData::unload() {
 542   _unloading = true;
 543 
 544   LogTarget(Trace, class, loader, data) lt;
 545   if (lt.is_enabled()) {
 546     ResourceMark rm;
 547     LogStream ls(lt);
 548     ls.print("unload");
 549     print_value_on(&ls);
 550     ls.cr();
 551   }
 552 
 553   // Some items on the _deallocate_list need to free their C heap structures
 554   // if they are not already on the _klasses list.
 555   free_deallocate_list_C_heap_structures();
 556 


 557   // Clean up class dependencies and tell serviceability tools
 558   // these classes are unloading.  Must be called
 559   // after erroneous classes are released.
 560   classes_do(InstanceKlass::unload_class);
 561 
 562   // Method::clear_jmethod_ids only sets the jmethod_ids to NULL without
 563   // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
 564   // This is done intentionally because native code (e.g. JVMTI agent) holding
 565   // jmethod_ids may access them after the associated classes and class loader
 566   // are unloaded. The Java Native Interface Specification says "method ID
 567   // does not prevent the VM from unloading the class from which the ID has
 568   // been derived. After the class is unloaded, the method or field ID becomes
  569   // invalid". In real-world usage, native code may rely on jmethod_ids
 570   // being NULL after class unloading. Hence, it is unsafe to free the memory
 571   // from the VM side without knowing when native code is going to stop using
 572   // them.
 573   if (_jmethod_ids != NULL) {
 574     Method::clear_jmethod_ids(this);
 575   }
 576 

 834 void ClassLoaderData::free_deallocate_list() {
 835   // This must be called at a safepoint because it depends on metadata walking at
 836   // safepoint cleanup time.
 837   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 838   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 839   if (_deallocate_list == NULL) {
 840     return;
 841   }
 842   // Go backwards because this removes entries that are freed.
 843   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 844     Metadata* m = _deallocate_list->at(i);
 845     if (!m->on_stack()) {
 846       _deallocate_list->remove_at(i);
 847       // There are only three types of metadata that we deallocate directly.
 848       // Cast them so they can be used by the template function.
 849       if (m->is_method()) {
 850         MetadataFactory::free_metadata(this, (Method*)m);
 851       } else if (m->is_constantPool()) {
 852         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 853       } else if (m->is_klass()) {
 854         MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 855       } else {
 856         ShouldNotReachHere();
 857       }
 858     } else {
 859       // Metadata is alive.
 860       // If scratch_class is on stack then it shouldn't be on this list!
 861       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 862              "scratch classes on this list should be dead");
 863       // Also should assert that other metadata on the list was found in handles.
 864       // Some cleaning remains.
 865       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 866     }
 867   }
 868 }
 869 
 870 // This is distinct from free_deallocate_list.  For class loader data that are
 871 // unloading, this frees the C heap memory for items on the list, and unlinks
 872 // scratch or error classes so that unloading events aren't triggered for these
 873 // classes. The metadata is removed with the unloading metaspace.
 874 // There isn't C heap memory allocated for methods, so nothing is done for them.

src/hotspot/share/classfile/classLoaderData.cpp (new version)

  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/moduleEntry.hpp"
  55 #include "classfile/packageEntry.hpp"
  56 #include "classfile/symbolTable.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "classfile/systemDictionaryShared.hpp"
  59 #include "classfile/vmClasses.hpp"
  60 #include "logging/log.hpp"
  61 #include "logging/logStream.hpp"
  62 #include "memory/allocation.inline.hpp"
  63 #include "memory/classLoaderMetaspace.hpp"
  64 #include "memory/metadataFactory.hpp"
  65 #include "memory/metaspace.hpp"
  66 #include "memory/resourceArea.hpp"
  67 #include "memory/universe.hpp"
  68 #include "oops/access.inline.hpp"
  69 #include "oops/klass.inline.hpp"
  70 #include "oops/objArrayKlass.hpp"
  71 #include "oops/oop.inline.hpp"
  72 #include "oops/oopHandle.inline.hpp"
  73 #include "oops/inlineKlass.inline.hpp"
  74 #include "oops/weakHandle.inline.hpp"
  75 #include "runtime/arguments.hpp"
  76 #include "runtime/atomic.hpp"
  77 #include "runtime/handles.inline.hpp"
  78 #include "runtime/mutex.hpp"
  79 #include "runtime/safepoint.hpp"
  80 #include "utilities/growableArray.hpp"
  81 #include "utilities/macros.hpp"
  82 #include "utilities/ostream.hpp"
  83 
  84 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  85 
  86 void ClassLoaderData::init_null_class_loader_data() {
  87   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  88   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
  89 
  90   _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  91   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  92   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  93 
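init_null_class_loader_data() above is a once-only bootstrap step: the singleton slot must still be empty, it is filled exactly once, and a sanity assert confirms the new instance identifies itself. A minimal standalone sketch of that pattern, with simplified stand-in types (LoaderData and init_null_loader_data are hypothetical, not HotSpot code):

#include <cassert>

struct LoaderData {
  static LoaderData* the_null_loader_data;
  bool is_the_null_loader_data() const { return this == the_null_loader_data; }
};

LoaderData* LoaderData::the_null_loader_data = nullptr;

void init_null_loader_data() {
  // May only run once: the slot must still be empty when we get here.
  assert(LoaderData::the_null_loader_data == nullptr && "cannot initialize twice");
  LoaderData::the_null_loader_data = new LoaderData();
  // Sanity check: the freshly created instance must identify itself.
  assert(LoaderData::the_null_loader_data->is_the_null_loader_data() && "Must be");
}

int main() {
  init_null_loader_data();
  return 0;
}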

 384 
 385 #ifdef ASSERT
 386     oop m = k->java_mirror();
 387     assert(m != NULL, "NULL mirror");
 388     assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
 389 #endif
 390     klass_closure->do_klass(k);
 391   }
 392 }
 393 
 394 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 395   // Lock-free access requires load_acquire
 396   for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 397     if (k->is_instance_klass()) {
 398       f(InstanceKlass::cast(k));
 399     }
 400     assert(k != k->next_link(), "no loops!");
 401   }
 402 }
 403 
 404 void ClassLoaderData::inline_classes_do(void f(InlineKlass*)) {
 405   // Lock-free access requires load_acquire
 406   for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 407     if (k->is_inline_klass()) {
 408       f(InlineKlass::cast(k));
 409     }
 410     assert(k != k->next_link(), "no loops!");
 411   }
 412 }
 413 
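Both classes_do() and the new inline_classes_do() rely on a single acquire load of the list head; the writer side (not shown in this hunk) is expected to publish newly linked klasses with a release store. A minimal standalone sketch of that traversal pattern, using std::atomic rather than HotSpot's Atomic wrappers (Node, push and for_each are hypothetical stand-ins):

#include <atomic>
#include <cstdio>

struct Node {
  int   payload;
  Node* next;   // set before the node is published, immutable afterwards
};

std::atomic<Node*> head{nullptr};

// Writers are assumed to be serialized externally (single-threaded in this
// sketch); the release store publishes the fully initialized node.
void push(int payload) {
  Node* n = new Node{payload, head.load(std::memory_order_relaxed)};
  head.store(n, std::memory_order_release);
}

// Readers take one acquire load of the head and then follow plain next
// pointers; every node reachable from that snapshot is fully initialized.
void for_each(void (*f)(int)) {
  for (Node* n = head.load(std::memory_order_acquire); n != nullptr; n = n->next) {
    f(n->payload);
  }
}

int main() {
  push(1); push(2); push(3);
  for_each([](int v) { std::printf("%d\n", v); });
  return 0;
}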
 414 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 415   assert_locked_or_safepoint(Module_lock);
 416   if (_unnamed_module != NULL) {
 417     f(_unnamed_module);
 418   }
 419   if (_modules != NULL) {
 420     _modules->modules_do(f);
 421   }
 422 }
 423 
 424 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 425   assert_locked_or_safepoint(Module_lock);
 426   if (_packages != NULL) {
 427     _packages->packages_do(f);
 428   }
 429 }
 430 
 431 void ClassLoaderData::record_dependency(const Klass* k) {
 432   assert(k != NULL, "invariant");
 433 

 548   }
 549   ShouldNotReachHere();   // should have found this class!!
 550 }
 551 
 552 void ClassLoaderData::unload() {
 553   _unloading = true;
 554 
 555   LogTarget(Trace, class, loader, data) lt;
 556   if (lt.is_enabled()) {
 557     ResourceMark rm;
 558     LogStream ls(lt);
 559     ls.print("unload");
 560     print_value_on(&ls);
 561     ls.cr();
 562   }
 563 
 564   // Some items on the _deallocate_list need to free their C heap structures
 565   // if they are not already on the _klasses list.
 566   free_deallocate_list_C_heap_structures();
 567 
 568   inline_classes_do(InlineKlass::cleanup);
 569 
 570   // Clean up class dependencies and tell serviceability tools
 571   // these classes are unloading.  Must be called
 572   // after erroneous classes are released.
 573   classes_do(InstanceKlass::unload_class);
 574 
 575   // Method::clear_jmethod_ids only sets the jmethod_ids to NULL without
 576   // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
 577   // This is done intentionally because native code (e.g. JVMTI agent) holding
 578   // jmethod_ids may access them after the associated classes and class loader
 579   // are unloaded. The Java Native Interface Specification says "method ID
 580   // does not prevent the VM from unloading the class from which the ID has
 581   // been derived. After the class is unloaded, the method or field ID becomes
  582   // invalid". In real-world usage, native code may rely on jmethod_ids
 583   // being NULL after class unloading. Hence, it is unsafe to free the memory
 584   // from the VM side without knowing when native code is going to stop using
 585   // them.
 586   if (_jmethod_ids != NULL) {
 587     Method::clear_jmethod_ids(this);
 588   }
 589 
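The jmethod_ids comment above describes a deliberate "clear, don't free" policy: the ID slots are set to NULL so that native code holding stale IDs observes NULL rather than dangling memory, at the cost of leaking the blocks. A minimal standalone sketch of that trade-off, with hypothetical MethodSlot/SlotBlock stand-ins rather than the real jmethodID/JNIMethodBlock types:

#include <cstdio>

struct MethodSlot { void* method; };   // stands in for one jmethodID slot

struct SlotBlock {                     // stands in for a JNIMethodBlock
  static const int kSlots = 4;
  MethodSlot slots[kSlots];

  // Analogue of the clearing step: NULL the slots, keep the block.
  void clear_all() {
    for (int i = 0; i < kSlots; i++) {
      slots[i].method = nullptr;
    }
    // The block is intentionally NOT deleted, so a caller that cached a slot
    // pointer still reads NULL instead of touching freed memory.
  }
};

int main() {
  SlotBlock* block = new SlotBlock();
  block->slots[0].method = reinterpret_cast<void*>(0x1234);
  MethodSlot* cached = &block->slots[0];   // native code caches an "id"
  block->clear_all();                      // the class "unloads"
  std::printf("stale id now resolves to %p\n", cached->method);
  // block is leaked on purpose, mirroring the trade-off described above.
  return 0;
}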

 847 void ClassLoaderData::free_deallocate_list() {
 848   // This must be called at a safepoint because it depends on metadata walking at
 849   // safepoint cleanup time.
 850   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 851   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 852   if (_deallocate_list == NULL) {
 853     return;
 854   }
 855   // Go backwards because this removes entries that are freed.
 856   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 857     Metadata* m = _deallocate_list->at(i);
 858     if (!m->on_stack()) {
 859       _deallocate_list->remove_at(i);
 860       // There are only three types of metadata that we deallocate directly.
 861       // Cast them so they can be used by the template function.
 862       if (m->is_method()) {
 863         MetadataFactory::free_metadata(this, (Method*)m);
 864       } else if (m->is_constantPool()) {
 865         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 866       } else if (m->is_klass()) {
 867         if (!((Klass*)m)->is_inline_klass()) {
 868           MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 869         } else {
 870           MetadataFactory::free_metadata(this, (InlineKlass*)m);
 871         }
 872       } else {
 873         ShouldNotReachHere();
 874       }
 875     } else {
 876       // Metadata is alive.
 877       // If scratch_class is on stack then it shouldn't be on this list!
 878       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 879              "scratch classes on this list should be dead");
 880       // Also should assert that other metadata on the list was found in handles.
 881       // Some cleaning remains.
 882       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 883     }
 884   }
 885 }
 886 
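free_deallocate_list() above walks the list from the back so that remove_at(i) never shifts an index it has yet to visit, and it only frees entries that are no longer on a stack. A standalone sketch of the same walk-backwards-and-remove pattern, using std::vector and hypothetical Item/Kind stand-ins instead of GrowableArray and Metadata:

#include <vector>
#include <cstdio>

enum class Kind { Method, ConstantPool, Klass };

struct Item {
  Kind kind;
  bool in_use;       // analogue of Metadata::on_stack()
};

void free_item(Item* item) {
  // The real code dispatches on the concrete metadata type here.
  std::printf("freeing item of kind %d\n", static_cast<int>(item->kind));
  delete item;
}

void free_deallocate_list(std::vector<Item*>& list) {
  // Go backwards because this removes entries that are freed.
  for (int i = static_cast<int>(list.size()) - 1; i >= 0; i--) {
    Item* item = list[i];
    if (!item->in_use) {
      list.erase(list.begin() + i);   // remove_at(i) equivalent
      free_item(item);
    }
    // else: still referenced somewhere; leave it for a later cleanup pass.
  }
}

int main() {
  std::vector<Item*> list = { new Item{Kind::Method, false},
                              new Item{Kind::Klass, true},
                              new Item{Kind::ConstantPool, false} };
  free_deallocate_list(list);
  std::printf("%zu item(s) still pending\n", list.size());
  for (Item* item : list) delete item;   // cleanup for the sketch
  return 0;
}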
 887 // This is distinct from free_deallocate_list.  For class loader data that are
 888 // unloading, this frees the C heap memory for items on the list, and unlinks
 889 // scratch or error classes so that unloading events aren't triggered for these
 890 // classes. The metadata is removed with the unloading metaspace.
 891 // There isn't C heap memory allocated for methods, so nothing is done for them.