48
49 #include "classfile/classLoaderData.inline.hpp"
50 #include "classfile/classLoaderDataGraph.inline.hpp"
51 #include "classfile/dictionary.hpp"
52 #include "classfile/javaClasses.inline.hpp"
53 #include "classfile/moduleEntry.hpp"
54 #include "classfile/packageEntry.hpp"
55 #include "classfile/symbolTable.hpp"
56 #include "classfile/systemDictionary.hpp"
57 #include "classfile/systemDictionaryShared.hpp"
58 #include "classfile/vmClasses.hpp"
59 #include "logging/log.hpp"
60 #include "logging/logStream.hpp"
61 #include "memory/allocation.inline.hpp"
62 #include "memory/classLoaderMetaspace.hpp"
63 #include "memory/metadataFactory.hpp"
64 #include "memory/metaspace.hpp"
65 #include "memory/resourceArea.hpp"
66 #include "memory/universe.hpp"
67 #include "oops/access.inline.hpp"
68 #include "oops/klass.inline.hpp"
69 #include "oops/oop.inline.hpp"
70 #include "oops/oopHandle.inline.hpp"
71 #include "oops/verifyOopClosure.hpp"
72 #include "oops/weakHandle.inline.hpp"
73 #include "runtime/arguments.hpp"
74 #include "runtime/atomic.hpp"
75 #include "runtime/handles.inline.hpp"
76 #include "runtime/mutex.hpp"
77 #include "runtime/safepoint.hpp"
78 #include "utilities/growableArray.hpp"
79 #include "utilities/macros.hpp"
80 #include "utilities/ostream.hpp"
81
82 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = nullptr;
83
84 void ClassLoaderData::init_null_class_loader_data() {
85 assert(_the_null_class_loader_data == nullptr, "cannot initialize twice");
86 assert(ClassLoaderDataGraph::_head == nullptr, "cannot initialize twice");
87
426
427 #ifdef ASSERT
428 oop m = k->java_mirror();
429 assert(m != nullptr, "nullptr mirror");
430 assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
431 #endif
432 klass_closure->do_klass(k);
433 }
434 }
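// An illustrative sketch (not part of the original file) of the kind of
// closure the walk ending above accepts. KlassClosure and its do_klass(Klass*)
// hook are assumed here to come from memory/iterator.hpp.
class CountingKlassClosure : public KlassClosure {
  int _count;
 public:
  CountingKlassClosure() : _count(0) {}
  void do_klass(Klass* k) { _count++; }   // invoked once per published Klass
  int count() const { return _count; }
};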
435
436 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
437 // Lock-free access requires load_acquire
438 for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
439 if (k->is_instance_klass()) {
440 f(InstanceKlass::cast(k));
441 }
442 assert(k != k->next_link(), "no loops!");
443 }
444 }
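// Why the load_acquire above is needed: readers walk _klasses without taking a
// lock, so the writer that prepends a new Klass must publish it with release
// semantics. The publishing side is not shown in this excerpt; a minimal
// sketch of the pattern (names and locking details assumed, not taken from
// this file):
//
//   k->set_next_link(old_head);            // initialize the link first
//   Atomic::release_store(&_klasses, k);   // then publish the new list head
//
// The release store orders the next_link initialization before the new head
// becomes visible, which is what the paired load_acquire in classes_do (and
// the other lock-free walks here) relies on.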
445
446 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
447 assert_locked_or_safepoint(Module_lock);
448 if (_unnamed_module != nullptr) {
449 f(_unnamed_module);
450 }
451 if (_modules != nullptr) {
452 _modules->modules_do(f);
453 }
454 }
455
456 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
457 assert_locked_or_safepoint(Module_lock);
458 if (_packages != nullptr) {
459 _packages->packages_do(f);
460 }
461 }
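// Usage sketch (illustrative only): both walks above require the caller to
// hold Module_lock or be at a safepoint. Assuming the usual MutexLocker from
// runtime/mutexLocker.hpp, a non-safepoint caller would look roughly like the
// hypothetical helper below.
static void print_package(PackageEntry* p) {
  // hypothetical per-package action, e.g. logging the package name
}
static void packages_do_example(ClassLoaderData* cld) {
  MutexLocker ml(Module_lock);        // satisfies assert_locked_or_safepoint
  cld->packages_do(print_package);
}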
462
463 void ClassLoaderData::record_dependency(const Klass* k) {
464 assert(k != nullptr, "invariant");
465
577 }
578 ShouldNotReachHere(); // should have found this class!!
579 }
580
581 void ClassLoaderData::unload() {
582 _unloading = true;
583
584 LogTarget(Trace, class, loader, data) lt;
585 if (lt.is_enabled()) {
586 ResourceMark rm;
587 LogStream ls(lt);
588 ls.print("unload");
589 print_value_on(&ls);
590 ls.cr();
591 }
592
593 // Some items on the _deallocate_list need to free their C heap structures
594 // if they are not already on the _klasses list.
595 free_deallocate_list_C_heap_structures();
596
597 // Clean up class dependencies and tell serviceability tools
598 // these classes are unloading. This must be called
599 // after erroneous classes are released.
600 classes_do(InstanceKlass::unload_class);
601
602 // Method::clear_jmethod_ids only sets the jmethod_ids to null without
603 // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
604 // This is done intentionally because native code (e.g. JVMTI agent) holding
605 // jmethod_ids may access them after the associated classes and class loader
606 // are unloaded. The Java Native Interface Specification says "method ID
607 // does not prevent the VM from unloading the class from which the ID has
608 // been derived. After the class is unloaded, the method or field ID becomes
609 // invalid". In real world usages, the native code may rely on jmethod_ids
610 // being null after class unloading. Hence, it is unsafe to free the memory
611 // from the VM side without knowing when native code is going to stop using
612 // them.
613 if (_jmethod_ids != nullptr) {
614 Method::clear_jmethod_ids(this);
615 }
616 }
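// The trace output produced in unload() above is gated by unified logging; it
// can be enabled on the launcher command line with, for example:
//
//   java -Xlog:class+loader+data=trace ...
//
// (standard -Xlog syntax; the tag set matches LogTarget(Trace, class, loader, data)).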
867 void ClassLoaderData::free_deallocate_list() {
868 // This must be called at a safepoint because it depends on metadata walking at
869 // safepoint cleanup time.
870 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
871 assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
872 if (_deallocate_list == nullptr) {
873 return;
874 }
875 // Go backwards because this removes entries that are freed.
876 for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
877 Metadata* m = _deallocate_list->at(i);
878 if (!m->on_stack()) {
879 _deallocate_list->remove_at(i);
880 // There are only three types of metadata that we deallocate directly.
881 // Cast them so they can be used by the template function.
882 if (m->is_method()) {
883 MetadataFactory::free_metadata(this, (Method*)m);
884 } else if (m->is_constantPool()) {
885 MetadataFactory::free_metadata(this, (ConstantPool*)m);
886 } else if (m->is_klass()) {
887 MetadataFactory::free_metadata(this, (InstanceKlass*)m);
888 } else {
889 ShouldNotReachHere();
890 }
891 } else {
892 // Metadata is alive.
893 // If scratch_class is on stack then it shouldn't be on this list!
894 assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
895 "scratch classes on this list should be dead");
896 // Also should assert that other metadata on the list was found in handles.
897 // Some cleaning remains.
898 ClassLoaderDataGraph::set_should_clean_deallocate_lists();
899 }
900 }
901 }
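// For context (sketch, not taken from this excerpt): entries typically reach
// _deallocate_list through an enqueue helper on ClassLoaderData, assumed here
// to be add_to_deallocate_list(Metadata*), e.g. when class redefinition
// replaces a Method or ConstantPool that may still be executing:
//
//   cld->add_to_deallocate_list(old_method);  // freed later by
//                                             // free_deallocate_list() once
//                                             // old_method is not on_stack()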
902
903 // This is distinct from free_deallocate_list. For class loader data that are
904 // unloading, this frees the C heap memory for items on the list, and unlinks
905 // scratch or error classes so that unloading events aren't triggered for these
906 // classes. The metadata is removed with the unloading metaspace.
907 // There isn't C heap memory allocated for methods, so nothing is done for them.
48
49 #include "classfile/classLoaderData.inline.hpp"
50 #include "classfile/classLoaderDataGraph.inline.hpp"
51 #include "classfile/dictionary.hpp"
52 #include "classfile/javaClasses.inline.hpp"
53 #include "classfile/moduleEntry.hpp"
54 #include "classfile/packageEntry.hpp"
55 #include "classfile/symbolTable.hpp"
56 #include "classfile/systemDictionary.hpp"
57 #include "classfile/systemDictionaryShared.hpp"
58 #include "classfile/vmClasses.hpp"
59 #include "logging/log.hpp"
60 #include "logging/logStream.hpp"
61 #include "memory/allocation.inline.hpp"
62 #include "memory/classLoaderMetaspace.hpp"
63 #include "memory/metadataFactory.hpp"
64 #include "memory/metaspace.hpp"
65 #include "memory/resourceArea.hpp"
66 #include "memory/universe.hpp"
67 #include "oops/access.inline.hpp"
68 #include "oops/inlineKlass.inline.hpp"
69 #include "oops/klass.inline.hpp"
70 #include "oops/oop.inline.hpp"
71 #include "oops/oopHandle.inline.hpp"
72 #include "oops/verifyOopClosure.hpp"
73 #include "oops/weakHandle.inline.hpp"
74 #include "runtime/arguments.hpp"
75 #include "runtime/atomic.hpp"
76 #include "runtime/handles.inline.hpp"
77 #include "runtime/mutex.hpp"
78 #include "runtime/safepoint.hpp"
79 #include "utilities/growableArray.hpp"
80 #include "utilities/macros.hpp"
81 #include "utilities/ostream.hpp"
82
83 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = nullptr;
84
85 void ClassLoaderData::init_null_class_loader_data() {
86 assert(_the_null_class_loader_data == nullptr, "cannot initialize twice");
87 assert(ClassLoaderDataGraph::_head == nullptr, "cannot initialize twice");
88
427
428 #ifdef ASSERT
429 oop m = k->java_mirror();
430 assert(m != nullptr, "nullptr mirror");
431 assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
432 #endif
433 klass_closure->do_klass(k);
434 }
435 }
436
437 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
438 // Lock-free access requires load_acquire
439 for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
440 if (k->is_instance_klass()) {
441 f(InstanceKlass::cast(k));
442 }
443 assert(k != k->next_link(), "no loops!");
444 }
445 }
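// Usage sketch (count_loaded is hypothetical): classes_do takes a plain
// void(InstanceKlass*) function, exactly like the existing
// classes_do(InstanceKlass::unload_class) call in unload() further below, so
// callers pass a file-scope function rather than a capturing lambda.
static int _loaded_instance_klasses = 0;
static void count_loaded(InstanceKlass* ik) {
  _loaded_instance_klasses++;
}
// e.g.  cld->classes_do(count_loaded);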
446
447 void ClassLoaderData::inline_classes_do(void f(InlineKlass*)) {
448 // Lock-free access requires load_acquire
449 for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
450 if (k->is_inline_klass()) {
451 f(InlineKlass::cast(k));
452 }
453 assert(k != k->next_link(), "no loops!");
454 }
455 }
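// inline_classes_do mirrors classes_do above but filters to value classes; the
// only caller in this excerpt is inline_classes_do(InlineKlass::cleanup) in
// unload() below. Because the parameter is a plain function pointer, a
// non-capturing lambda also works, e.g. (hypothetical):
//
//   cld->inline_classes_do([](InlineKlass* vk) { /* per-value-class action */ });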
456
457 void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
458 assert_locked_or_safepoint(Module_lock);
459 if (_unnamed_module != nullptr) {
460 f(_unnamed_module);
461 }
462 if (_modules != nullptr) {
463 _modules->modules_do(f);
464 }
465 }
466
467 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
468 assert_locked_or_safepoint(Module_lock);
469 if (_packages != nullptr) {
470 _packages->packages_do(f);
471 }
472 }
473
474 void ClassLoaderData::record_dependency(const Klass* k) {
475 assert(k != nullptr, "invariant");
476
588 }
589 ShouldNotReachHere(); // should have found this class!!
590 }
591
592 void ClassLoaderData::unload() {
593 _unloading = true;
594
595 LogTarget(Trace, class, loader, data) lt;
596 if (lt.is_enabled()) {
597 ResourceMark rm;
598 LogStream ls(lt);
599 ls.print("unload");
600 print_value_on(&ls);
601 ls.cr();
602 }
603
604 // Some items on the _deallocate_list need to free their C heap structures
605 // if they are not already on the _klasses list.
606 free_deallocate_list_C_heap_structures();
607
608 inline_classes_do(InlineKlass::cleanup);
609
610 // Clean up class dependencies and tell serviceability tools
611 // these classes are unloading. This must be called
612 // after erroneous classes are released.
613 classes_do(InstanceKlass::unload_class);
614
615 // Method::clear_jmethod_ids only sets the jmethod_ids to null without
616 // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
617 // This is done intentionally because native code (e.g. JVMTI agent) holding
618 // jmethod_ids may access them after the associated classes and class loader
619 // are unloaded. The Java Native Interface Specification says "method ID
620 // does not prevent the VM from unloading the class from which the ID has
621 // been derived. After the class is unloaded, the method or field ID becomes
622 // invalid". In real world usages, the native code may rely on jmethod_ids
623 // being null after class unloading. Hence, it is unsafe to free the memory
624 // from the VM side without knowing when native code is going to stop using
625 // them.
626 if (_jmethod_ids != nullptr) {
627 Method::clear_jmethod_ids(this);
628 }
629 }
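// The scenario the jmethod_ids comment above guards against, sketched from the
// agent side (hypothetical names; plain JNI, outside the VM):
//
//   static jmethodID cached_mid;                        // stashed by an agent
//   ...
//   cached_mid = env->GetMethodID(cls, "run", "()V");   // standard JNI lookup
//   ...
//   // After the defining class and its loader are unloaded, the id is invalid
//   // per the JNI spec, yet the agent may still read the variable; clearing
//   // the VM-side slots to null instead of freeing the JNIMethodBlocks keeps
//   // such late accesses from touching freed memory.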
880 void ClassLoaderData::free_deallocate_list() {
881 // This must be called at a safepoint because it depends on metadata walking at
882 // safepoint cleanup time.
883 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
884 assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
885 if (_deallocate_list == nullptr) {
886 return;
887 }
888 // Go backwards because this removes entries that are freed.
889 for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
890 Metadata* m = _deallocate_list->at(i);
891 if (!m->on_stack()) {
892 _deallocate_list->remove_at(i);
893 // There are only three types of metadata that we deallocate directly.
894 // Cast them so they can be used by the template function.
895 if (m->is_method()) {
896 MetadataFactory::free_metadata(this, (Method*)m);
897 } else if (m->is_constantPool()) {
898 MetadataFactory::free_metadata(this, (ConstantPool*)m);
899 } else if (m->is_klass()) {
900 if (!((Klass*)m)->is_inline_klass()) {
901 MetadataFactory::free_metadata(this, (InstanceKlass*)m);
902 } else {
903 MetadataFactory::free_metadata(this, (InlineKlass*)m);
904 }
905 } else {
906 ShouldNotReachHere();
907 }
908 } else {
909 // Metadata is alive.
910 // If scratch_class is on stack then it shouldn't be on this list!
911 assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
912 "scratch classes on this list should be dead");
913 // Also should assert that other metadata on the list was found in handles.
914 // Some cleaning remains.
915 ClassLoaderDataGraph::set_should_clean_deallocate_lists();
916 }
917 }
918 }
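// Note on the is_inline_klass() split above: as the existing "Cast them so
// they can be used by the template function" comment says,
// MetadataFactory::free_metadata is a template, so the static type of the
// pointer selects the instantiation; the runtime check routes an InlineKlass
// through the InlineKlass* instantiation rather than the InstanceKlass* one.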
919
920 // This is distinct from free_deallocate_list. For class loader data that are
921 // unloading, this frees the C heap memory for items on the list, and unlinks
922 // scratch or error classes so that unloading events aren't triggered for these
923 // classes. The metadata is removed with the unloading metaspace.
924 // There isn't C heap memory allocated for methods, so nothing is done for them.