49 #include "precompiled.hpp"
50 #include "classfile/classLoaderData.inline.hpp"
51 #include "classfile/classLoaderDataGraph.inline.hpp"
52 #include "classfile/dictionary.hpp"
53 #include "classfile/javaClasses.inline.hpp"
54 #include "classfile/moduleEntry.hpp"
55 #include "classfile/packageEntry.hpp"
56 #include "classfile/symbolTable.hpp"
57 #include "classfile/systemDictionary.hpp"
58 #include "classfile/systemDictionaryShared.hpp"
59 #include "classfile/vmClasses.hpp"
60 #include "logging/log.hpp"
61 #include "logging/logStream.hpp"
62 #include "memory/allocation.inline.hpp"
63 #include "memory/classLoaderMetaspace.hpp"
64 #include "memory/metadataFactory.hpp"
65 #include "memory/metaspace.hpp"
66 #include "memory/resourceArea.hpp"
67 #include "memory/universe.hpp"
68 #include "oops/access.inline.hpp"
69 #include "oops/klass.inline.hpp"
70 #include "oops/oop.inline.hpp"
71 #include "oops/oopHandle.inline.hpp"
72 #include "oops/verifyOopClosure.hpp"
73 #include "oops/weakHandle.inline.hpp"
74 #include "runtime/arguments.hpp"
75 #include "runtime/atomic.hpp"
76 #include "runtime/handles.inline.hpp"
77 #include "runtime/mutex.hpp"
78 #include "runtime/safepoint.hpp"
79 #include "utilities/growableArray.hpp"
80 #include "utilities/macros.hpp"
81 #include "utilities/ostream.hpp"
82
83 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = nullptr;
84
85 void ClassLoaderData::init_null_class_loader_data() {
86 assert(_the_null_class_loader_data == nullptr, "cannot initialize twice");
87 assert(ClassLoaderDataGraph::_head == nullptr, "cannot initialize twice");
88
381
382 #ifdef ASSERT
383 oop m = k->java_mirror();
384 assert(m != nullptr, "nullptr mirror");
385 assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
386 #endif
387 klass_closure->do_klass(k);
388 }
389 }
390
391 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
392 // Lock-free access requires load_acquire
393 for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
394 if (k->is_instance_klass()) {
395 f(InstanceKlass::cast(k));
396 }
397 assert(k != k->next_link(), "no loops!");
398 }
399 }
400
401 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
402 assert_locked_or_safepoint(Module_lock);
403 if (_unnamed_module != nullptr) {
404 f(_unnamed_module);
405 }
406 if (_modules != nullptr) {
407 _modules->modules_do(f);
408 }
409 }
410
411 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
412 assert_locked_or_safepoint(Module_lock);
413 if (_packages != nullptr) {
414 _packages->packages_do(f);
415 }
416 }
417
418 void ClassLoaderData::record_dependency(const Klass* k) {
419 assert(k != nullptr, "invariant");
420
535 }
536 ShouldNotReachHere(); // should have found this class!!
537 }
538
539 void ClassLoaderData::unload() {
540 _unloading = true;
541
542 LogTarget(Trace, class, loader, data) lt;
543 if (lt.is_enabled()) {
544 ResourceMark rm;
545 LogStream ls(lt);
546 ls.print("unload");
547 print_value_on(&ls);
548 ls.cr();
549 }
550
551 // Some items on the _deallocate_list need to free their C heap structures
552 // if they are not already on the _klasses list.
553 free_deallocate_list_C_heap_structures();
554
555 // Clean up class dependencies and tell serviceability tools
556 // these classes are unloading. Must be called
557 // after erroneous classes are released.
558 classes_do(InstanceKlass::unload_class);
559
560 // Method::clear_jmethod_ids only sets the jmethod_ids to null without
561 // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
562 // This is done intentionally because native code (e.g. JVMTI agent) holding
563 // jmethod_ids may access them after the associated classes and class loader
564 // are unloaded. The Java Native Interface Specification says "method ID
565 // does not prevent the VM from unloading the class from which the ID has
566 // been derived. After the class is unloaded, the method or field ID becomes
567 // invalid". In real world usages, the native code may rely on jmethod_ids
568 // being null after class unloading. Hence, it is unsafe to free the memory
569 // from the VM side without knowing when native code is going to stop using
570 // them.
571 if (_jmethod_ids != nullptr) {
572 Method::clear_jmethod_ids(this);
573 }
574
// Frees metadata queued on _deallocate_list that is no longer referenced from
// any thread's stack. Entries still on-stack are left on the list so a later
// safepoint cleanup pass can retry; that remaining work is signalled via
// ClassLoaderDataGraph::set_should_clean_deallocate_lists().
void ClassLoaderData::free_deallocate_list() {
  // This must be called at a safepoint because it depends on metadata walking at
  // safepoint cleanup time.
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == nullptr) {
    return;
  }
  // Go backwards because this removes entries that are freed; iterating forward
  // after remove_at would skip elements.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        // Anything else on the list is a bug in whoever enqueued it.
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
      // Some cleaning remains: tell the graph so this list is revisited.
      ClassLoaderDataGraph::set_should_clean_deallocate_lists();
    }
  }
}
867
868 // This is distinct from free_deallocate_list. For class loader data that are
869 // unloading, this frees the C heap memory for items on the list, and unlinks
870 // scratch or error classes so that unloading events aren't triggered for these
871 // classes. The metadata is removed with the unloading metaspace.
872 // There isn't C heap memory allocated for methods, so nothing is done for them.
|
49 #include "precompiled.hpp"
50 #include "classfile/classLoaderData.inline.hpp"
51 #include "classfile/classLoaderDataGraph.inline.hpp"
52 #include "classfile/dictionary.hpp"
53 #include "classfile/javaClasses.inline.hpp"
54 #include "classfile/moduleEntry.hpp"
55 #include "classfile/packageEntry.hpp"
56 #include "classfile/symbolTable.hpp"
57 #include "classfile/systemDictionary.hpp"
58 #include "classfile/systemDictionaryShared.hpp"
59 #include "classfile/vmClasses.hpp"
60 #include "logging/log.hpp"
61 #include "logging/logStream.hpp"
62 #include "memory/allocation.inline.hpp"
63 #include "memory/classLoaderMetaspace.hpp"
64 #include "memory/metadataFactory.hpp"
65 #include "memory/metaspace.hpp"
66 #include "memory/resourceArea.hpp"
67 #include "memory/universe.hpp"
68 #include "oops/access.inline.hpp"
69 #include "oops/inlineKlass.inline.hpp"
70 #include "oops/klass.inline.hpp"
71 #include "oops/oop.inline.hpp"
72 #include "oops/oopHandle.inline.hpp"
73 #include "oops/verifyOopClosure.hpp"
74 #include "oops/weakHandle.inline.hpp"
75 #include "runtime/arguments.hpp"
76 #include "runtime/atomic.hpp"
77 #include "runtime/handles.inline.hpp"
78 #include "runtime/mutex.hpp"
79 #include "runtime/safepoint.hpp"
80 #include "utilities/growableArray.hpp"
81 #include "utilities/macros.hpp"
82 #include "utilities/ostream.hpp"
83
84 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = nullptr;
85
86 void ClassLoaderData::init_null_class_loader_data() {
87 assert(_the_null_class_loader_data == nullptr, "cannot initialize twice");
88 assert(ClassLoaderDataGraph::_head == nullptr, "cannot initialize twice");
89
382
383 #ifdef ASSERT
384 oop m = k->java_mirror();
385 assert(m != nullptr, "nullptr mirror");
386 assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
387 #endif
388 klass_closure->do_klass(k);
389 }
390 }
391
392 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
393 // Lock-free access requires load_acquire
394 for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
395 if (k->is_instance_klass()) {
396 f(InstanceKlass::cast(k));
397 }
398 assert(k != k->next_link(), "no loops!");
399 }
400 }
401
402 void ClassLoaderData::inline_classes_do(void f(InlineKlass*)) {
403 // Lock-free access requires load_acquire
404 for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
405 if (k->is_inline_klass()) {
406 f(InlineKlass::cast(k));
407 }
408 assert(k != k->next_link(), "no loops!");
409 }
410 }
411
412 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
413 assert_locked_or_safepoint(Module_lock);
414 if (_unnamed_module != nullptr) {
415 f(_unnamed_module);
416 }
417 if (_modules != nullptr) {
418 _modules->modules_do(f);
419 }
420 }
421
422 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
423 assert_locked_or_safepoint(Module_lock);
424 if (_packages != nullptr) {
425 _packages->packages_do(f);
426 }
427 }
428
429 void ClassLoaderData::record_dependency(const Klass* k) {
430 assert(k != nullptr, "invariant");
431
546 }
547 ShouldNotReachHere(); // should have found this class!!
548 }
549
550 void ClassLoaderData::unload() {
551 _unloading = true;
552
553 LogTarget(Trace, class, loader, data) lt;
554 if (lt.is_enabled()) {
555 ResourceMark rm;
556 LogStream ls(lt);
557 ls.print("unload");
558 print_value_on(&ls);
559 ls.cr();
560 }
561
562 // Some items on the _deallocate_list need to free their C heap structures
563 // if they are not already on the _klasses list.
564 free_deallocate_list_C_heap_structures();
565
566 inline_classes_do(InlineKlass::cleanup);
567
568 // Clean up class dependencies and tell serviceability tools
569 // these classes are unloading. Must be called
570 // after erroneous classes are released.
571 classes_do(InstanceKlass::unload_class);
572
573 // Method::clear_jmethod_ids only sets the jmethod_ids to null without
574 // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
575 // This is done intentionally because native code (e.g. JVMTI agent) holding
576 // jmethod_ids may access them after the associated classes and class loader
577 // are unloaded. The Java Native Interface Specification says "method ID
578 // does not prevent the VM from unloading the class from which the ID has
579 // been derived. After the class is unloaded, the method or field ID becomes
580 // invalid". In real world usages, the native code may rely on jmethod_ids
581 // being null after class unloading. Hence, it is unsafe to free the memory
582 // from the VM side without knowing when native code is going to stop using
583 // them.
584 if (_jmethod_ids != nullptr) {
585 Method::clear_jmethod_ids(this);
586 }
587
// Frees metadata queued on _deallocate_list that is no longer referenced from
// any thread's stack. Entries still on-stack are left on the list so a later
// safepoint cleanup pass can retry; that remaining work is signalled via
// ClassLoaderDataGraph::set_should_clean_deallocate_lists().
void ClassLoaderData::free_deallocate_list() {
  // This must be called at a safepoint because it depends on metadata walking at
  // safepoint cleanup time.
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == nullptr) {
    return;
  }
  // Go backwards because this removes entries that are freed; iterating forward
  // after remove_at would skip elements.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // Only methods, constant pools and klasses are deallocated directly.
      // Cast them so they can be used by the template function; klasses are
      // further split so an InlineKlass is freed with its exact type.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        if (!((Klass*)m)->is_inline_klass()) {
          MetadataFactory::free_metadata(this, (InstanceKlass*)m);
        } else {
          MetadataFactory::free_metadata(this, (InlineKlass*)m);
        }
      } else {
        // Anything else on the list is a bug in whoever enqueued it.
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
      // Some cleaning remains: tell the graph so this list is revisited.
      ClassLoaderDataGraph::set_should_clean_deallocate_lists();
    }
  }
}
884
885 // This is distinct from free_deallocate_list. For class loader data that are
886 // unloading, this frees the C heap memory for items on the list, and unlinks
887 // scratch or error classes so that unloading events aren't triggered for these
888 // classes. The metadata is removed with the unloading metaspace.
889 // There isn't C heap memory allocated for methods, so nothing is done for them.
|