49 #include "precompiled.hpp"
50 #include "classfile/classLoaderData.inline.hpp"
51 #include "classfile/classLoaderDataGraph.inline.hpp"
52 #include "classfile/dictionary.hpp"
53 #include "classfile/javaClasses.inline.hpp"
54 #include "classfile/moduleEntry.hpp"
55 #include "classfile/packageEntry.hpp"
56 #include "classfile/symbolTable.hpp"
57 #include "classfile/systemDictionary.hpp"
58 #include "classfile/systemDictionaryShared.hpp"
59 #include "classfile/vmClasses.hpp"
60 #include "logging/log.hpp"
61 #include "logging/logStream.hpp"
62 #include "memory/allocation.inline.hpp"
63 #include "memory/classLoaderMetaspace.hpp"
64 #include "memory/metadataFactory.hpp"
65 #include "memory/metaspace.hpp"
66 #include "memory/resourceArea.hpp"
67 #include "memory/universe.hpp"
68 #include "oops/access.inline.hpp"
69 #include "oops/klass.inline.hpp"
70 #include "oops/oop.inline.hpp"
71 #include "oops/oopHandle.inline.hpp"
72 #include "oops/verifyOopClosure.hpp"
73 #include "oops/weakHandle.inline.hpp"
74 #include "runtime/arguments.hpp"
75 #include "runtime/atomic.hpp"
76 #include "runtime/handles.inline.hpp"
77 #include "runtime/mutex.hpp"
78 #include "runtime/safepoint.hpp"
79 #include "utilities/growableArray.hpp"
80 #include "utilities/macros.hpp"
81 #include "utilities/ostream.hpp"
82
83 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = nullptr;
84
85 void ClassLoaderData::init_null_class_loader_data() {
86 assert(_the_null_class_loader_data == nullptr, "cannot initialize twice");
87 assert(ClassLoaderDataGraph::_head == nullptr, "cannot initialize twice");
88
430
431 #ifdef ASSERT
432 oop m = k->java_mirror();
433 assert(m != nullptr, "nullptr mirror");
434 assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
435 #endif
436 klass_closure->do_klass(k);
437 }
438 }
439
440 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
441 // Lock-free access requires load_acquire
442 for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
443 if (k->is_instance_klass()) {
444 f(InstanceKlass::cast(k));
445 }
446 assert(k != k->next_link(), "no loops!");
447 }
448 }
449
450 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
451 assert_locked_or_safepoint(Module_lock);
452 if (_unnamed_module != nullptr) {
453 f(_unnamed_module);
454 }
455 if (_modules != nullptr) {
456 _modules->modules_do(f);
457 }
458 }
459
460 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
461 assert_locked_or_safepoint(Module_lock);
462 if (_packages != nullptr) {
463 _packages->packages_do(f);
464 }
465 }
466
467 void ClassLoaderData::record_dependency(const Klass* k) {
468 assert(k != nullptr, "invariant");
469
581 }
582 ShouldNotReachHere(); // should have found this class!!
583 }
584
// Transition this ClassLoaderData into the unloading state and perform the
// cleanup that must happen while its metadata is still walkable.  The
// ordering of the steps below is significant; see the comments on each.
void ClassLoaderData::unload() {
  _unloading = true;

  // Optional trace logging of which loader data is being unloaded.
  LogTarget(Trace, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("unload");
    print_value_on(&ls);
    ls.cr();
  }

  // Some items on the _deallocate_list need to free their C heap structures
  // if they are not already on the _klasses list.
  free_deallocate_list_C_heap_structures();

  // Clean up class dependencies and tell serviceability tools
  // these classes are unloading. This must be called
  // after erroneous classes are released.
  classes_do(InstanceKlass::unload_class);

  // Method::clear_jmethod_ids only sets the jmethod_ids to null without
  // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
  // This is done intentionally because native code (e.g. JVMTI agent) holding
  // jmethod_ids may access them after the associated classes and class loader
  // are unloaded. The Java Native Interface Specification says "method ID
  // does not prevent the VM from unloading the class from which the ID has
  // been derived. After the class is unloaded, the method or field ID becomes
  // invalid". In real world usages, the native code may rely on jmethod_ids
  // being null after class unloading. Hence, it is unsafe to free the memory
  // from the VM side without knowing when native code is going to stop using
  // them.
  if (_jmethod_ids != nullptr) {
    Method::clear_jmethod_ids(this);
  }
}
875 void ClassLoaderData::free_deallocate_list() {
876 // This must be called at a safepoint because it depends on metadata walking at
877 // safepoint cleanup time.
878 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
879 assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
880 if (_deallocate_list == nullptr) {
881 return;
882 }
883 // Go backwards because this removes entries that are freed.
884 for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
885 Metadata* m = _deallocate_list->at(i);
886 if (!m->on_stack()) {
887 _deallocate_list->remove_at(i);
888 // There are only three types of metadata that we deallocate directly.
889 // Cast them so they can be used by the template function.
890 if (m->is_method()) {
891 MetadataFactory::free_metadata(this, (Method*)m);
892 } else if (m->is_constantPool()) {
893 MetadataFactory::free_metadata(this, (ConstantPool*)m);
894 } else if (m->is_klass()) {
895 MetadataFactory::free_metadata(this, (InstanceKlass*)m);
896 } else {
897 ShouldNotReachHere();
898 }
899 } else {
900 // Metadata is alive.
901 // If scratch_class is on stack then it shouldn't be on this list!
902 assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
903 "scratch classes on this list should be dead");
904 // Also should assert that other metadata on the list was found in handles.
905 // Some cleaning remains.
906 ClassLoaderDataGraph::set_should_clean_deallocate_lists();
907 }
908 }
909 }
910
911 // This is distinct from free_deallocate_list. For class loader data that are
912 // unloading, this frees the C heap memory for items on the list, and unlinks
913 // scratch or error classes so that unloading events aren't triggered for these
914 // classes. The metadata is removed with the unloading metaspace.
915 // There isn't C heap memory allocated for methods, so nothing is done for them.
|
49 #include "precompiled.hpp"
50 #include "classfile/classLoaderData.inline.hpp"
51 #include "classfile/classLoaderDataGraph.inline.hpp"
52 #include "classfile/dictionary.hpp"
53 #include "classfile/javaClasses.inline.hpp"
54 #include "classfile/moduleEntry.hpp"
55 #include "classfile/packageEntry.hpp"
56 #include "classfile/symbolTable.hpp"
57 #include "classfile/systemDictionary.hpp"
58 #include "classfile/systemDictionaryShared.hpp"
59 #include "classfile/vmClasses.hpp"
60 #include "logging/log.hpp"
61 #include "logging/logStream.hpp"
62 #include "memory/allocation.inline.hpp"
63 #include "memory/classLoaderMetaspace.hpp"
64 #include "memory/metadataFactory.hpp"
65 #include "memory/metaspace.hpp"
66 #include "memory/resourceArea.hpp"
67 #include "memory/universe.hpp"
68 #include "oops/access.inline.hpp"
69 #include "oops/inlineKlass.inline.hpp"
70 #include "oops/klass.inline.hpp"
71 #include "oops/oop.inline.hpp"
72 #include "oops/oopHandle.inline.hpp"
73 #include "oops/verifyOopClosure.hpp"
74 #include "oops/weakHandle.inline.hpp"
75 #include "runtime/arguments.hpp"
76 #include "runtime/atomic.hpp"
77 #include "runtime/handles.inline.hpp"
78 #include "runtime/mutex.hpp"
79 #include "runtime/safepoint.hpp"
80 #include "utilities/growableArray.hpp"
81 #include "utilities/macros.hpp"
82 #include "utilities/ostream.hpp"
83
84 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = nullptr;
85
86 void ClassLoaderData::init_null_class_loader_data() {
87 assert(_the_null_class_loader_data == nullptr, "cannot initialize twice");
88 assert(ClassLoaderDataGraph::_head == nullptr, "cannot initialize twice");
89
431
432 #ifdef ASSERT
433 oop m = k->java_mirror();
434 assert(m != nullptr, "nullptr mirror");
435 assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
436 #endif
437 klass_closure->do_klass(k);
438 }
439 }
440
441 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
442 // Lock-free access requires load_acquire
443 for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
444 if (k->is_instance_klass()) {
445 f(InstanceKlass::cast(k));
446 }
447 assert(k != k->next_link(), "no loops!");
448 }
449 }
450
451 void ClassLoaderData::inline_classes_do(void f(InlineKlass*)) {
452 // Lock-free access requires load_acquire
453 for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
454 if (k->is_inline_klass()) {
455 f(InlineKlass::cast(k));
456 }
457 assert(k != k->next_link(), "no loops!");
458 }
459 }
460
461 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
462 assert_locked_or_safepoint(Module_lock);
463 if (_unnamed_module != nullptr) {
464 f(_unnamed_module);
465 }
466 if (_modules != nullptr) {
467 _modules->modules_do(f);
468 }
469 }
470
471 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
472 assert_locked_or_safepoint(Module_lock);
473 if (_packages != nullptr) {
474 _packages->packages_do(f);
475 }
476 }
477
478 void ClassLoaderData::record_dependency(const Klass* k) {
479 assert(k != nullptr, "invariant");
480
592 }
593 ShouldNotReachHere(); // should have found this class!!
594 }
595
// Transition this ClassLoaderData into the unloading state and perform the
// cleanup that must happen while its metadata is still walkable.  The
// ordering of the steps below is significant; see the comments on each.
void ClassLoaderData::unload() {
  _unloading = true;

  // Optional trace logging of which loader data is being unloaded.
  LogTarget(Trace, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("unload");
    print_value_on(&ls);
    ls.cr();
  }

  // Some items on the _deallocate_list need to free their C heap structures
  // if they are not already on the _klasses list.
  free_deallocate_list_C_heap_structures();

  // Run InlineKlass-specific cleanup over any inline classes this loader
  // defined, before the generic unload notification below.
  inline_classes_do(InlineKlass::cleanup);

  // Clean up class dependencies and tell serviceability tools
  // these classes are unloading. This must be called
  // after erroneous classes are released.
  classes_do(InstanceKlass::unload_class);

  // Method::clear_jmethod_ids only sets the jmethod_ids to null without
  // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
  // This is done intentionally because native code (e.g. JVMTI agent) holding
  // jmethod_ids may access them after the associated classes and class loader
  // are unloaded. The Java Native Interface Specification says "method ID
  // does not prevent the VM from unloading the class from which the ID has
  // been derived. After the class is unloaded, the method or field ID becomes
  // invalid". In real world usages, the native code may rely on jmethod_ids
  // being null after class unloading. Hence, it is unsafe to free the memory
  // from the VM side without knowing when native code is going to stop using
  // them.
  if (_jmethod_ids != nullptr) {
    Method::clear_jmethod_ids(this);
  }
}
888 void ClassLoaderData::free_deallocate_list() {
889 // This must be called at a safepoint because it depends on metadata walking at
890 // safepoint cleanup time.
891 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
892 assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
893 if (_deallocate_list == nullptr) {
894 return;
895 }
896 // Go backwards because this removes entries that are freed.
897 for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
898 Metadata* m = _deallocate_list->at(i);
899 if (!m->on_stack()) {
900 _deallocate_list->remove_at(i);
901 // There are only three types of metadata that we deallocate directly.
902 // Cast them so they can be used by the template function.
903 if (m->is_method()) {
904 MetadataFactory::free_metadata(this, (Method*)m);
905 } else if (m->is_constantPool()) {
906 MetadataFactory::free_metadata(this, (ConstantPool*)m);
907 } else if (m->is_klass()) {
908 if (!((Klass*)m)->is_inline_klass()) {
909 MetadataFactory::free_metadata(this, (InstanceKlass*)m);
910 } else {
911 MetadataFactory::free_metadata(this, (InlineKlass*)m);
912 }
913 } else {
914 ShouldNotReachHere();
915 }
916 } else {
917 // Metadata is alive.
918 // If scratch_class is on stack then it shouldn't be on this list!
919 assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
920 "scratch classes on this list should be dead");
921 // Also should assert that other metadata on the list was found in handles.
922 // Some cleaning remains.
923 ClassLoaderDataGraph::set_should_clean_deallocate_lists();
924 }
925 }
926 }
927
928 // This is distinct from free_deallocate_list. For class loader data that are
929 // unloading, this frees the C heap memory for items on the list, and unlinks
930 // scratch or error classes so that unloading events aren't triggered for these
931 // classes. The metadata is removed with the unloading metaspace.
932 // There isn't C heap memory allocated for methods, so nothing is done for them.
|