src/hotspot/share/classfile/classLoaderData.cpp

Old version:

  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/moduleEntry.hpp"
  55 #include "classfile/packageEntry.hpp"
  56 #include "classfile/symbolTable.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "classfile/systemDictionaryShared.hpp"
  59 #include "classfile/vmClasses.hpp"
  60 #include "logging/log.hpp"
  61 #include "logging/logStream.hpp"
  62 #include "memory/allocation.inline.hpp"
  63 #include "memory/classLoaderMetaspace.hpp"
  64 #include "memory/metadataFactory.hpp"
  65 #include "memory/metaspace.hpp"
  66 #include "memory/resourceArea.hpp"
  67 #include "memory/universe.hpp"
  68 #include "oops/access.inline.hpp"
  69 #include "oops/klass.inline.hpp"
  70 #include "oops/objArrayKlass.hpp"
  71 #include "oops/oop.inline.hpp"
  72 #include "oops/oopHandle.inline.hpp"
  73 #include "oops/weakHandle.inline.hpp"
  74 #include "runtime/arguments.hpp"
  75 #include "runtime/atomic.hpp"
  76 #include "runtime/handles.inline.hpp"
  77 #include "runtime/mutex.hpp"
  78 #include "runtime/safepoint.hpp"
  79 #include "utilities/growableArray.hpp"
  80 #include "utilities/macros.hpp"
  81 #include "utilities/ostream.hpp"
  82 
  83 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  84 
  85 void ClassLoaderData::init_null_class_loader_data() {
  86   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  87   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
  88 
  89   _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  90   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  91   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  92 

 377 
 378 #ifdef ASSERT
 379     oop m = k->java_mirror();
 380     assert(m != NULL, "NULL mirror");
 381     assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
 382 #endif
 383     klass_closure->do_klass(k);
 384   }
 385 }
 386 
 387 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 388   // Lock-free access requires load_acquire
 389   for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 390     if (k->is_instance_klass()) {
 391       f(InstanceKlass::cast(k));
 392     }
 393     assert(k != k->next_link(), "no loops!");
 394   }
 395 }
 396 
 397 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 398   assert_locked_or_safepoint(Module_lock);
 399   if (_unnamed_module != NULL) {
 400     f(_unnamed_module);
 401   }
 402   if (_modules != NULL) {
 403     _modules->modules_do(f);
 404   }
 405 }
 406 
 407 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 408   assert_locked_or_safepoint(Module_lock);
 409   if (_packages != NULL) {
 410     _packages->packages_do(f);
 411   }
 412 }
 413 
 414 void ClassLoaderData::record_dependency(const Klass* k) {
 415   assert(k != NULL, "invariant");
 416 

 531   }
 532   ShouldNotReachHere();   // should have found this class!!
 533 }
 534 
 535 void ClassLoaderData::unload() {
 536   _unloading = true;
 537 
 538   LogTarget(Trace, class, loader, data) lt;
 539   if (lt.is_enabled()) {
 540     ResourceMark rm;
 541     LogStream ls(lt);
 542     ls.print("unload");
 543     print_value_on(&ls);
 544     ls.cr();
 545   }
 546 
 547   // Some items on the _deallocate_list need to free their C heap structures
 548   // if they are not already on the _klasses list.
 549   free_deallocate_list_C_heap_structures();
 550 
 551   // Clean up class dependencies and tell serviceability tools
 552   // these classes are unloading.  Must be called
 553   // after erroneous classes are released.
 554   classes_do(InstanceKlass::unload_class);
 555 
 556   // Method::clear_jmethod_ids only sets the jmethod_ids to NULL without
 557   // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
 558   // This is done intentionally because native code (e.g. JVMTI agent) holding
 559   // jmethod_ids may access them after the associated classes and class loader
 560   // are unloaded. The Java Native Interface Specification says "method ID
 561   // does not prevent the VM from unloading the class from which the ID has
 562   // been derived. After the class is unloaded, the method or field ID becomes
 563   // invalid". In real world usages, the native code may rely on jmethod_ids
 564   // being NULL after class unloading. Hence, it is unsafe to free the memory
 565   // from the VM side without knowing when native code is going to stop using
 566   // them.
 567   if (_jmethod_ids != NULL) {
 568     Method::clear_jmethod_ids(this);
 569   }
 570 

 834 void ClassLoaderData::free_deallocate_list() {
 835   // This must be called at a safepoint because it depends on metadata walking at
 836   // safepoint cleanup time.
 837   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 838   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 839   if (_deallocate_list == NULL) {
 840     return;
 841   }
 842   // Go backwards because this removes entries that are freed.
 843   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 844     Metadata* m = _deallocate_list->at(i);
 845     if (!m->on_stack()) {
 846       _deallocate_list->remove_at(i);
 847       // There are only three types of metadata that we deallocate directly.
 848       // Cast them so they can be used by the template function.
 849       if (m->is_method()) {
 850         MetadataFactory::free_metadata(this, (Method*)m);
 851       } else if (m->is_constantPool()) {
 852         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 853       } else if (m->is_klass()) {
 854         MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 855       } else {
 856         ShouldNotReachHere();
 857       }
 858     } else {
 859       // Metadata is alive.
 860       // If scratch_class is on stack then it shouldn't be on this list!
 861       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 862              "scratch classes on this list should be dead");
 863       // Also should assert that other metadata on the list was found in handles.
 864       // Some cleaning remains.
 865       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 866     }
 867   }
 868 }
 869 
 870 // This is distinct from free_deallocate_list.  For class loader data that are
 871 // unloading, this frees the C heap memory for items on the list, and unlinks
 872 // scratch or error classes so that unloading events aren't triggered for these
 873 // classes. The metadata is removed with the unloading metaspace.
 874 // There isn't C heap memory allocated for methods, so nothing is done for them.

New version:

  53 #include "classfile/javaClasses.hpp"
  54 #include "classfile/moduleEntry.hpp"
  55 #include "classfile/packageEntry.hpp"
  56 #include "classfile/symbolTable.hpp"
  57 #include "classfile/systemDictionary.hpp"
  58 #include "classfile/systemDictionaryShared.hpp"
  59 #include "classfile/vmClasses.hpp"
  60 #include "logging/log.hpp"
  61 #include "logging/logStream.hpp"
  62 #include "memory/allocation.inline.hpp"
  63 #include "memory/classLoaderMetaspace.hpp"
  64 #include "memory/metadataFactory.hpp"
  65 #include "memory/metaspace.hpp"
  66 #include "memory/resourceArea.hpp"
  67 #include "memory/universe.hpp"
  68 #include "oops/access.inline.hpp"
  69 #include "oops/klass.inline.hpp"
  70 #include "oops/objArrayKlass.hpp"
  71 #include "oops/oop.inline.hpp"
  72 #include "oops/oopHandle.inline.hpp"
  73 #include "oops/inlineKlass.inline.hpp"
  74 #include "oops/weakHandle.inline.hpp"
  75 #include "runtime/arguments.hpp"
  76 #include "runtime/atomic.hpp"
  77 #include "runtime/handles.inline.hpp"
  78 #include "runtime/mutex.hpp"
  79 #include "runtime/safepoint.hpp"
  80 #include "utilities/growableArray.hpp"
  81 #include "utilities/macros.hpp"
  82 #include "utilities/ostream.hpp"
  83 
  84 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
  85 
  86 void ClassLoaderData::init_null_class_loader_data() {
  87   assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
  88   assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
  89 
  90   _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  91   ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  92   assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  93 

 378 
 379 #ifdef ASSERT
 380     oop m = k->java_mirror();
 381     assert(m != NULL, "NULL mirror");
 382     assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
 383 #endif
 384     klass_closure->do_klass(k);
 385   }
 386 }
 387 
 388 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
 389   // Lock-free access requires load_acquire
 390   for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 391     if (k->is_instance_klass()) {
 392       f(InstanceKlass::cast(k));
 393     }
 394     assert(k != k->next_link(), "no loops!");
 395   }
 396 }
 397 
 398 void ClassLoaderData::inline_classes_do(void f(InlineKlass*)) {
 399   // Lock-free access requires load_acquire
 400   for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
 401     if (k->is_inline_klass()) {
 402       f(InlineKlass::cast(k));
 403     }
 404     assert(k != k->next_link(), "no loops!");
 405   }
 406 }
 407 
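The "Lock-free access requires load_acquire" comments in classes_do and inline_classes_do describe the reader half of an acquire/release pairing: a Klass is fully set up and linked to the current head before being published into _klasses with a release-style store, so a reader that load-acquires the head never sees a partially initialized entry. Below is a minimal, self-contained sketch of that publication pattern; it uses std::atomic and a hypothetical Node type rather than HotSpot's Atomic API and the Klass list, and assumes writers are serialized by a lock (as the klass-list writer is).

    #include <atomic>

    struct Node {
      int   payload;
      Node* next;
    };

    std::atomic<Node*> list_head{nullptr};

    // Writer (assumed to run under a lock): fully initialize the node, link it
    // to the old head, then publish it with a release store so readers never
    // observe a half-built node.
    void publish(int value) {
      Node* n = new Node{value, list_head.load(std::memory_order_relaxed)};
      list_head.store(n, std::memory_order_release);
    }

    // Reader: acquire the head once, then walk the next pointers without a
    // lock, mirroring the loop shape of classes_do above.
    int count_nodes() {
      int count = 0;
      for (Node* n = list_head.load(std::memory_order_acquire); n != nullptr; n = n->next) {
        count++;
      }
      return count;
    }
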
 408 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
 409   assert_locked_or_safepoint(Module_lock);
 410   if (_unnamed_module != NULL) {
 411     f(_unnamed_module);
 412   }
 413   if (_modules != NULL) {
 414     _modules->modules_do(f);
 415   }
 416 }
 417 
 418 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
 419   assert_locked_or_safepoint(Module_lock);
 420   if (_packages != NULL) {
 421     _packages->packages_do(f);
 422   }
 423 }
 424 
 425 void ClassLoaderData::record_dependency(const Klass* k) {
 426   assert(k != NULL, "invariant");
 427 

 542   }
 543   ShouldNotReachHere();   // should have found this class!!
 544 }
 545 
 546 void ClassLoaderData::unload() {
 547   _unloading = true;
 548 
 549   LogTarget(Trace, class, loader, data) lt;
 550   if (lt.is_enabled()) {
 551     ResourceMark rm;
 552     LogStream ls(lt);
 553     ls.print("unload");
 554     print_value_on(&ls);
 555     ls.cr();
 556   }
 557 
 558   // Some items on the _deallocate_list need to free their C heap structures
 559   // if they are not already on the _klasses list.
 560   free_deallocate_list_C_heap_structures();
 561 
 562   inline_classes_do(InlineKlass::cleanup);
 563 
 564   // Clean up class dependencies and tell serviceability tools
 565   // these classes are unloading.  Must be called
 566   // after erroneous classes are released.
 567   classes_do(InstanceKlass::unload_class);
 568 
 569   // Method::clear_jmethod_ids only sets the jmethod_ids to NULL without
 570   // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
 571   // This is done intentionally because native code (e.g. JVMTI agent) holding
 572   // jmethod_ids may access them after the associated classes and class loader
 573   // are unloaded. The Java Native Interface Specification says "method ID
 574   // does not prevent the VM from unloading the class from which the ID has
 575   // been derived. After the class is unloaded, the method or field ID becomes
 576   // invalid". In real world usages, the native code may rely on jmethod_ids
 577   // being NULL after class unloading. Hence, it is unsafe to free the memory
 578   // from the VM side without knowing when native code is going to stop using
 579   // them.
 580   if (_jmethod_ids != NULL) {
 581     Method::clear_jmethod_ids(this);
 582   }
 583 
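The jmethod_ids comment above amounts to: keep the id blocks allocated for the life of the VM, but clear their entries, so a stale jmethodID held by native code still points into live memory and resolves to nothing instead of dangling. Here is a minimal sketch of that idea, using a hypothetical IdBlock type that stands in for (and does not match) HotSpot's actual JNIMethodBlock layout:

    struct IdBlock {
      static constexpr int kSlots = 32;
      void*    slots[kSlots];   // each slot backs one handed-out id
      IdBlock* next;            // blocks are chained and intentionally never freed
    };

    // Analogue of clearing a loader's jmethod_ids: null every slot in every
    // block, but leave the blocks themselves allocated so ids handed out
    // earlier remain safely dereferenceable and simply yield nullptr.
    void clear_ids(IdBlock* head) {
      for (IdBlock* b = head; b != nullptr; b = b->next) {
        for (int i = 0; i < IdBlock::kSlots; i++) {
          b->slots[i] = nullptr;
        }
      }
    }
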

 847 void ClassLoaderData::free_deallocate_list() {
 848   // This must be called at a safepoint because it depends on metadata walking at
 849   // safepoint cleanup time.
 850   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
 851   assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
 852   if (_deallocate_list == NULL) {
 853     return;
 854   }
 855   // Go backwards because this removes entries that are freed.
 856   for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
 857     Metadata* m = _deallocate_list->at(i);
 858     if (!m->on_stack()) {
 859       _deallocate_list->remove_at(i);
 860       // There are only three types of metadata that we deallocate directly.
 861       // Cast them so they can be used by the template function.
 862       if (m->is_method()) {
 863         MetadataFactory::free_metadata(this, (Method*)m);
 864       } else if (m->is_constantPool()) {
 865         MetadataFactory::free_metadata(this, (ConstantPool*)m);
 866       } else if (m->is_klass()) {
 867         if (!((Klass*)m)->is_inline_klass()) {
 868           MetadataFactory::free_metadata(this, (InstanceKlass*)m);
 869         } else {
 870           MetadataFactory::free_metadata(this, (InlineKlass*)m);
 871         }
 872       } else {
 873         ShouldNotReachHere();
 874       }
 875     } else {
 876       // Metadata is alive.
 877       // If scratch_class is on stack then it shouldn't be on this list!
 878       assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
 879              "scratch classes on this list should be dead");
 880       // Also should assert that other metadata on the list was found in handles.
 881       // Some cleaning remains.
 882       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
 883     }
 884   }
 885 }
 886 
 887 // This is distinct from free_deallocate_list.  For class loader data that are
 888 // unloading, this frees the C heap memory for items on the list, and unlinks
 889 // scratch or error classes so that unloading events aren't triggered for these
 890 // classes. The metadata is removed with the unloading metaspace.
 891 // There isn't C heap memory allocated for methods, so nothing is done for them.