/*
 * Copyright (c) 2012, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// A ClassLoaderData identifies the full set of class types that a class
// loader's name resolution strategy produces for a given configuration of the
// class loader.
// Class types in the ClassLoaderData may be defined from class file binaries
// provided by the class loader, or by other class loaders it interacts with
// according to its name resolution strategy.
//
// Class loaders that implement a deterministic name resolution strategy
// (including with respect to their delegation behavior), such as the boot, the
// platform, and the system loaders of the JDK's built-in class loader
// hierarchy, always produce the same linkset for a given configuration.
//
// ClassLoaderData carries information related to a linkset (e.g., the
// metaspace holding its klass definitions).
// The System Dictionary and related data structures (e.g., placeholder table,
// loader constraints table) as well as the runtime representation of classes
// only reference ClassLoaderData.
//
// Instances of java.lang.ClassLoader hold a pointer to a ClassLoaderData that
// represents the loader's "linking domain" in the JVM.
//
// The bootstrap loader (represented by null) also has a ClassLoaderData,
// the singleton returned by the_null_class_loader_data().
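//
// Illustrative sketch (added commentary, not part of the implementation): runtime code
// typically reaches a ClassLoaderData either from a Klass or from a class loader oop,
// for example
//   ClassLoaderData* cld = k->class_loader_data();                     // from a Klass
//   ClassLoaderData* cld = ClassLoaderData::class_loader_data(loader); // from a class loader oop
// with the boot loader's CLD available as ClassLoaderData::the_null_class_loader_data().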

#include "precompiled.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/classLoaderDataGraph.inline.hpp"
#include "classfile/dictionary.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/packageEntry.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "classfile/vmClasses.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/classLoaderMetaspace.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspace.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/access.inline.hpp"
#include "oops/klass.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/oopHandle.inline.hpp"
#include "oops/verifyOopClosure.hpp"
#include "oops/weakHandle.inline.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomic.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/mutex.hpp"
#include "runtime/safepoint.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"

ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = nullptr;

void ClassLoaderData::init_null_class_loader_data() {
  assert(_the_null_class_loader_data == nullptr, "cannot initialize twice");
  assert(ClassLoaderDataGraph::_head == nullptr, "cannot initialize twice");

  _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
  ClassLoaderDataGraph::_head = _the_null_class_loader_data;
  assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");

  LogTarget(Trace, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("create ");
    _the_null_class_loader_data->print_value_on(&ls);
    ls.cr();
  }
}

// Obtain and set the class loader's name within the ClassLoaderData so
// it will be available for error messages, logging, JFR, etc. The name
// and klass are available after the class_loader oop is no longer alive,
// during unloading.
void ClassLoaderData::initialize_name(Handle class_loader) {
  ResourceMark rm;

  // Obtain the class loader's name. If the class loader's name was not
  // explicitly set during construction, the CLD's _name field will be null.
  oop cl_name = java_lang_ClassLoader::name(class_loader());
  if (cl_name != nullptr) {
    const char* cl_instance_name = java_lang_String::as_utf8_string(cl_name);

    if (cl_instance_name != nullptr && cl_instance_name[0] != '\0') {
      _name = SymbolTable::new_symbol(cl_instance_name);
    }
  }

  // Obtain the class loader's name and identity hash. If the class loader's
  // name was not explicitly set during construction, the class loader's name and id
  // will be set to the qualified class name of the class loader along with its
  // identity hash.
  // If for some reason the ClassLoader's constructor has not been run, instead of
  // leaving the _name_and_id field null, fall back to the external qualified class
  // name. Thus CLD's _name_and_id field should never have a null value.
  oop cl_name_and_id = java_lang_ClassLoader::nameAndId(class_loader());
  const char* cl_instance_name_and_id =
                  (cl_name_and_id == nullptr) ? _class_loader_klass->external_name() :
                                                java_lang_String::as_utf8_string(cl_name_and_id);
  assert(cl_instance_name_and_id != nullptr && cl_instance_name_and_id[0] != '\0', "class loader has no name and id");
  _name_and_id = SymbolTable::new_symbol(cl_instance_name_and_id);
}
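
// Illustrative example (assumed values, added commentary): a loader constructed with an
// explicit name, e.g. new URLClassLoader("myloader", urls, parent) on the Java side, ends
// up with _name "myloader" and a _name_and_id of the form 'myloader' @<id>, while an
// unnamed user-defined loader falls back to its qualified class name plus @<id>. The
// built-in loaders omit the @<id> suffix (see loader_name_and_id() below).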

ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool has_class_mirror_holder) :
  _metaspace(nullptr),
  _metaspace_lock(new Mutex(Mutex::nosafepoint-2, "MetaspaceAllocation_lock")),
  _unloading(false), _has_class_mirror_holder(has_class_mirror_holder),
  _modified_oops(true),
  // A non-strong hidden class loader data doesn't have anything to keep
  // it from being unloaded during parsing of the non-strong hidden class.
  // The null-class-loader should always be kept alive.
  _keep_alive((has_class_mirror_holder || h_class_loader.is_null()) ? 1 : 0),
  _claim(0),
  _handles(),
  _klasses(nullptr), _packages(nullptr), _modules(nullptr), _unnamed_module(nullptr), _dictionary(nullptr),
  _jmethod_ids(nullptr),
  _deallocate_list(nullptr),
  _next(nullptr),
  _class_loader_klass(nullptr), _name(nullptr), _name_and_id(nullptr) {

  if (!h_class_loader.is_null()) {
    _class_loader = _handles.add(h_class_loader());
    _class_loader_klass = h_class_loader->klass();
    initialize_name(h_class_loader);
  }

  if (!has_class_mirror_holder) {
    // The holder is initialized later for non-strong hidden classes,
    // and before calling anything that calls class_loader().
    initialize_holder(h_class_loader);

    // A ClassLoaderData created solely for a non-strong hidden class should never
    // have a ModuleEntryTable or PackageEntryTable created for it.
    _packages = new PackageEntryTable();
    if (h_class_loader.is_null()) {
      // Create unnamed module for boot loader
      _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
    } else {
      // Create unnamed module for all other loaders
      _unnamed_module = ModuleEntry::create_unnamed_module(this);
    }
    _dictionary = create_dictionary();
  }

  NOT_PRODUCT(_dependency_count = 0); // number of class loader dependencies

  JFR_ONLY(INIT_ID(this);)
}

ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
  Chunk* c = _head;
  while (c != nullptr) {
    Chunk* next = c->_next;
    delete c;
    c = next;
  }
}

OopHandle ClassLoaderData::ChunkedHandleList::add(oop o) {
  if (_head == nullptr || _head->_size == Chunk::CAPACITY) {
    Chunk* next = new Chunk(_head);
    Atomic::release_store(&_head, next);
  }
  oop* handle = &_head->_data[_head->_size];
  NativeAccess<IS_DEST_UNINITIALIZED>::oop_store(handle, o);
  Atomic::release_store(&_head->_size, _head->_size + 1);
  return OopHandle(handle);
}

int ClassLoaderData::ChunkedHandleList::count() const {
  int count = 0;
  Chunk* chunk = _head;
  while (chunk != nullptr) {
    count += chunk->_size;
    chunk = chunk->_next;
  }
  return count;
}

inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
  for (juint i = 0; i < size; i++) {
    f->do_oop(&c->_data[i]);
  }
}

void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
  Chunk* head = Atomic::load_acquire(&_head);
  if (head != nullptr) {
    // Must be careful when reading size of head
    oops_do_chunk(f, head, Atomic::load_acquire(&head->_size));
    for (Chunk* c = head->_next; c != nullptr; c = c->_next) {
      oops_do_chunk(f, c, c->_size);
    }
  }
}
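
// Descriptive note (added commentary, no new behavior): add() publishes a new chunk and
// the incremented _size with Atomic::release_store, and oops_do() reads them back with
// Atomic::load_acquire. This release/acquire pairing lets readers walk the list without
// a lock while a writer appends. Only the head chunk's _size can still be growing; once
// a chunk is no longer the head it is full and immutable, which is why only the head's
// _size is re-read with load_acquire above.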

class VerifyContainsOopClosure : public OopClosure {
  oop  _target;
  bool _found;

 public:
  VerifyContainsOopClosure(oop target) : _target(target), _found(false) {}

  void do_oop(oop* p) {
    if (p != nullptr && NativeAccess<AS_NO_KEEPALIVE>::oop_load(p) == _target) {
      _found = true;
    }
  }

  void do_oop(narrowOop* p) {
    // The ChunkedHandleList should not contain any narrowOop
    ShouldNotReachHere();
  }

  bool found() const {
    return _found;
  }
};

bool ClassLoaderData::ChunkedHandleList::contains(oop p) {
  VerifyContainsOopClosure cl(p);
  oops_do(&cl);
  return cl.found();
}

#ifndef PRODUCT
bool ClassLoaderData::ChunkedHandleList::owner_of(oop* oop_handle) {
  Chunk* chunk = _head;
  while (chunk != nullptr) {
    if (&(chunk->_data[0]) <= oop_handle && oop_handle < &(chunk->_data[chunk->_size])) {
      return true;
    }
    chunk = chunk->_next;
  }
  return false;
}
#endif // PRODUCT

void ClassLoaderData::clear_claim(int claim) {
  for (;;) {
    int old_claim = Atomic::load(&_claim);
    if ((old_claim & claim) == 0) {
      return;
    }
    int new_claim = old_claim & ~claim;
    if (Atomic::cmpxchg(&_claim, old_claim, new_claim) == old_claim) {
      return;
    }
  }
}

#ifdef ASSERT
void ClassLoaderData::verify_not_claimed(int claim) {
  assert((_claim & claim) == 0, "Found claim: %d bits in _claim: %d", claim, _claim);
}
#endif

bool ClassLoaderData::try_claim(int claim) {
  for (;;) {
    int old_claim = Atomic::load(&_claim);
    if ((old_claim & claim) == claim) {
      return false;
    }
    int new_claim = old_claim | claim;
    if (Atomic::cmpxchg(&_claim, old_claim, new_claim) == old_claim) {
      return true;
    }
  }
}
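
// Illustrative usage sketch (assumed call sequence, based on how the claim bits are used by
// GC code elsewhere, not on anything in this file): a GC that wants to visit each CLD's
// handles exactly once first clears the claim bits and then lets its worker threads race on
// try_claim() through oops_do(), e.g.
//
//   ClassLoaderDataGraph::clear_claimed_marks();  // once, before the parallel phase
//   ...
//   cld->oops_do(closure, ClassLoaderData::_claim_strong, false /* clear_mod_oops */);
//
// try_claim() uses a CAS loop so that exactly one thread wins each claim bit; threads that
// lose the race simply skip the CLD (see oops_do() below).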

// Non-strong hidden classes have their own ClassLoaderData that is marked to keep alive
// while the class is being parsed, and if the class appears on the module fixup list.
// Due to the uniqueness that no other class shares the hidden class' name or
// ClassLoaderData, no other non-GC thread has knowledge of the hidden class while
// it is being defined, therefore _keep_alive is not volatile or atomic.
void ClassLoaderData::inc_keep_alive() {
  if (has_class_mirror_holder()) {
    assert(_keep_alive > 0, "Invalid keep alive increment count");
    _keep_alive++;
  }
}

void ClassLoaderData::dec_keep_alive() {
  if (has_class_mirror_holder()) {
    assert(_keep_alive > 0, "Invalid keep alive decrement count");
    _keep_alive--;
  }
}

void ClassLoaderData::oops_do(OopClosure* f, int claim_value, bool clear_mod_oops) {
  if (claim_value != ClassLoaderData::_claim_none && !try_claim(claim_value)) {
    return;
  }

  // Only clear modified_oops after the ClassLoaderData is claimed.
  if (clear_mod_oops) {
    clear_modified_oops();
  }

  _handles.oops_do(f);
}

void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
    klass_closure->do_klass(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::classes_do(void f(Klass * const)) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
    f(k);
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::methods_do(void f(Method*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
    if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
      InstanceKlass::cast(k)->methods_do(f);
    }
  }
}

void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
  // To call this, one must have the MultiArray_lock held, but the _klasses list still has lock free reads.
  assert_locked_or_safepoint(MultiArray_lock);

  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
    // Filter out InstanceKlasses (or their ObjArrayKlasses) that have not entered the
    // loaded state.
    if (k->is_instance_klass()) {
      if (!InstanceKlass::cast(k)->is_loaded()) {
        continue;
      }
    } else if (k->is_shared() && k->is_objArray_klass()) {
      Klass* bottom = ObjArrayKlass::cast(k)->bottom_klass();
      if (bottom->is_instance_klass() && !InstanceKlass::cast(bottom)->is_loaded()) {
        // This could happen if <bottom> is a shared class that has been restored
        // but is not yet marked as loaded. All archived array classes of the
        // bottom class are already restored and placed in the _klasses list.
        continue;
      }
    }

#ifdef ASSERT
    oop m = k->java_mirror();
    assert(m != nullptr, "nullptr mirror");
    assert(m->is_a(vmClasses::Class_klass()), "invalid mirror");
#endif
    klass_closure->do_klass(k);
  }
}

void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
    if (k->is_instance_klass()) {
      f(InstanceKlass::cast(k));
    }
    assert(k != k->next_link(), "no loops!");
  }
}

void ClassLoaderData::modules_do(void f(ModuleEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_unnamed_module != nullptr) {
    f(_unnamed_module);
  }
  if (_modules != nullptr) {
    _modules->modules_do(f);
  }
}

void ClassLoaderData::packages_do(void f(PackageEntry*)) {
  assert_locked_or_safepoint(Module_lock);
  if (_packages != nullptr) {
    _packages->packages_do(f);
  }
}

void ClassLoaderData::record_dependency(const Klass* k) {
  assert(k != nullptr, "invariant");

  ClassLoaderData * const from_cld = this;
  ClassLoaderData * const to_cld = k->class_loader_data();

  // Do not need to record dependency if the dependency is to a class whose
  // class loader data is never freed. (i.e. the dependency's class loader
  // is one of the three builtin class loaders and the dependency's class
  // loader data has a ClassLoader holder, not a Class holder.)
  if (to_cld->is_permanent_class_loader_data()) {
    return;
  }

  oop to;
  if (to_cld->has_class_mirror_holder()) {
    // Just return if a non-strong hidden class is attempting to record a dependency
    // to itself. (Note that every non-strong hidden class has its own unique class
    // loader data.)
    if (to_cld == from_cld) {
      return;
    }
    // Hidden class dependencies are through the mirror.
    to = k->java_mirror();
  } else {
    to = to_cld->class_loader();
    oop from = from_cld->class_loader();

    // Just return if this dependency is to a class with the same or a parent
    // class_loader.
    if (from == to || java_lang_ClassLoader::isAncestor(from, to)) {
      return; // this class loader is in the parent list, no need to add it.
    }
  }

  // It's a dependency we won't find through GC, add it.
  if (!_handles.contains(to)) {
    NOT_PRODUCT(Atomic::inc(&_dependency_count));
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("adding dependency from ");
      print_value_on(&ls);
      ls.print(" to ");
      to_cld->print_value_on(&ls);
      ls.cr();
    }
    Handle dependency(Thread::current(), to);
    add_handle(dependency);
    // Added a potentially young gen oop to the ClassLoaderData
    record_modified_oops();
  }
}
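
// Illustrative scenario (added commentary): if a class in loader A's CLD links to a class
// defined by a loader B that is neither A itself nor one of A's delegation ancestors, B's
// class loader oop (or the hidden class's mirror) is stashed in A's _handles above. That
// keeps B's CLD from being unloaded while A's classes still reference its metadata, since
// the GC cannot discover this edge through the Java object graph alone.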

void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
  {
    MutexLocker ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    Klass* old_value = _klasses;
    k->set_next_link(old_value);
    // Link the new item into the list, making sure the linked class is stable
    // since the list can be walked without a lock
    Atomic::release_store(&_klasses, k);
    if (k->is_array_klass()) {
      ClassLoaderDataGraph::inc_array_classes(1);
    } else {
      ClassLoaderDataGraph::inc_instance_classes(1);
    }
  }

  if (publicize) {
    LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
      print_value_on(&ls);
      ls.cr();
    }
  }
}

void ClassLoaderData::initialize_holder(Handle loader_or_mirror) {
  if (loader_or_mirror() != nullptr) {
    assert(_holder.is_null(), "never replace holders");
    _holder = WeakHandle(Universe::vm_weak(), loader_or_mirror);
  }
}

// Remove a klass from the _klasses list for scratch_class during redefinition
// or parsed class in the case of an error.
void ClassLoaderData::remove_class(Klass* scratch_class) {
  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);

  // Adjust global class iterator.
  ClassLoaderDataGraph::adjust_saved_class(scratch_class);

  Klass* prev = nullptr;
  for (Klass* k = _klasses; k != nullptr; k = k->next_link()) {
    if (k == scratch_class) {
      if (prev == nullptr) {
        _klasses = k->next_link();
      } else {
        Klass* next = k->next_link();
        prev->set_next_link(next);
      }

      if (k->is_array_klass()) {
        ClassLoaderDataGraph::dec_array_classes(1);
      } else {
        ClassLoaderDataGraph::dec_instance_classes(1);
      }

      return;
    }
    prev = k;
    assert(k != k->next_link(), "no loops!");
  }
  ShouldNotReachHere();   // should have found this class!!
}

void ClassLoaderData::unload() {
  _unloading = true;

  LogTarget(Trace, class, loader, data) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("unload");
    print_value_on(&ls);
    ls.cr();
  }

  // Some items on the _deallocate_list need to free their C heap structures
  // if they are not already on the _klasses list.
  free_deallocate_list_C_heap_structures();

  // Clean up class dependencies and tell serviceability tools
  // these classes are unloading. Must be called
  // after erroneous classes are released.
  classes_do(InstanceKlass::unload_class);

  // Method::clear_jmethod_ids only sets the jmethod_ids to null without
  // releasing the memory for related JNIMethodBlocks and JNIMethodBlockNodes.
  // This is done intentionally because native code (e.g. JVMTI agent) holding
  // jmethod_ids may access them after the associated classes and class loader
  // are unloaded. The Java Native Interface Specification says "method ID
  // does not prevent the VM from unloading the class from which the ID has
  // been derived. After the class is unloaded, the method or field ID becomes
  // invalid". In real world usages, the native code may rely on jmethod_ids
  // being null after class unloading. Hence, it is unsafe to free the memory
  // from the VM side without knowing when native code is going to stop using
  // them.
  if (_jmethod_ids != nullptr) {
    Method::clear_jmethod_ids(this);
  }

  // Clean up global class iterator for compiler
  ClassLoaderDataGraph::adjust_saved_class(this);
}

ModuleEntryTable* ClassLoaderData::modules() {
  // Lazily create the module entry table at first request.
  // Lock-free access requires load_acquire.
  ModuleEntryTable* modules = Atomic::load_acquire(&_modules);
  if (modules == nullptr) {
    MutexLocker m1(Module_lock);
    // Check if _modules got allocated while we were waiting for this lock.
    if ((modules = _modules) == nullptr) {
      modules = new ModuleEntryTable();

      {
        MutexLocker m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
        // Ensure _modules is stable, since it is examined without a lock
        Atomic::release_store(&_modules, modules);
      }
    }
  }
  return modules;
}
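
// Added note: modules() above and metaspace_non_null() further down follow the same
// lazy-initialization pattern: a lock-free load_acquire fast path, a re-check under the
// lock, and a release_store to publish the newly created structure so that readers that
// skip the lock still observe a fully constructed object.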

const int _boot_loader_dictionary_size    = 1009;
const int _default_loader_dictionary_size = 107;

Dictionary* ClassLoaderData::create_dictionary() {
  assert(!has_class_mirror_holder(), "class mirror holder cld does not have a dictionary");
  int size;
  if (_the_null_class_loader_data == nullptr) {
    size = _boot_loader_dictionary_size;
  } else if (class_loader()->is_a(vmClasses::reflect_DelegatingClassLoader_klass())) {
    size = 1;  // there's only one class in the reflection class loader and no initiated classes
  } else if (is_system_class_loader_data()) {
    size = _boot_loader_dictionary_size;
  } else {
    size = _default_loader_dictionary_size;
  }
  return new Dictionary(this, size);
}

// Tell the GC to keep this klass alive. Needed while iterating ClassLoaderDataGraph,
// and any runtime code that uses klasses.
oop ClassLoaderData::holder() const {
  // A klass that was previously considered dead can be looked up in the
  // CLD/SD, and its _java_mirror or _class_loader can be stored in a root
  // or a reachable object making it alive again. The SATB part of G1 needs
  // to get notified about this potential resurrection, otherwise the marking
  // might not find the object.
  if (!_holder.is_null()) {  // null class_loader
    return _holder.resolve();
  } else {
    return nullptr;
  }
}

// Let the GC read the holder without keeping it alive.
oop ClassLoaderData::holder_no_keepalive() const {
  if (!_holder.is_null()) {  // null class_loader
    return _holder.peek();
  } else {
    return nullptr;
  }
}

// Unloading support
bool ClassLoaderData::is_alive() const {
  bool alive = keep_alive()            // null class loader and incomplete non-strong hidden class.
      || (_holder.peek() != nullptr);  // and not cleaned by the GC weak handle processing.

  return alive;
}

class ReleaseKlassClosure: public KlassClosure {
 private:
  size_t _instance_class_released;
  size_t _array_class_released;

 public:
  ReleaseKlassClosure() : _instance_class_released(0), _array_class_released(0) { }

  size_t instance_class_released() const { return _instance_class_released; }
  size_t array_class_released()    const { return _array_class_released; }

  void do_klass(Klass* k) {
    if (k->is_array_klass()) {
      _array_class_released ++;
    } else {
      assert(k->is_instance_klass(), "Must be");
      _instance_class_released ++;
    }
    k->release_C_heap_structures();
  }
};

ClassLoaderData::~ClassLoaderData() {
  // Release C heap structures for all the classes.
  ReleaseKlassClosure cl;
  classes_do(&cl);

  ClassLoaderDataGraph::dec_array_classes(cl.array_class_released());
  ClassLoaderDataGraph::dec_instance_classes(cl.instance_class_released());

  // Release the WeakHandle
  _holder.release(Universe::vm_weak());

  // Release C heap allocated hashtable for all the packages.
  if (_packages != nullptr) {
    // Destroy the table itself
    delete _packages;
    _packages = nullptr;
  }

  // Release C heap allocated hashtable for all the modules.
  if (_modules != nullptr) {
    // Destroy the table itself
    delete _modules;
    _modules = nullptr;
  }

  // Release C heap allocated hashtable for the dictionary
  if (_dictionary != nullptr) {
    // Destroy the table itself
    delete _dictionary;
    _dictionary = nullptr;
  }

  if (_unnamed_module != nullptr) {
    delete _unnamed_module;
    _unnamed_module = nullptr;
  }

  // release the metaspace
  ClassLoaderMetaspace *m = _metaspace;
  if (m != nullptr) {
    _metaspace = nullptr;
    delete m;
  }

  // Delete lock
  delete _metaspace_lock;

  // Delete free list
  if (_deallocate_list != nullptr) {
    delete _deallocate_list;
  }

  // Decrement refcounts of Symbols if created.
  if (_name != nullptr) {
    _name->decrement_refcount();
  }
  if (_name_and_id != nullptr) {
    _name_and_id->decrement_refcount();
  }
}

// Returns true if this class loader data is for the app class loader
// or a user defined system class loader. (Note that the class loader
// data may have a Class holder.)
bool ClassLoaderData::is_system_class_loader_data() const {
  return SystemDictionary::is_system_class_loader(class_loader());
}

// Returns true if this class loader data is for the platform class loader.
// (Note that the class loader data may have a Class holder.)
bool ClassLoaderData::is_platform_class_loader_data() const {
  return SystemDictionary::is_platform_class_loader(class_loader());
}

// Returns true if the class loader for this class loader data is one of
// the 3 builtin (boot, application/system, or platform) class loaders,
// including a user-defined system class loader. Note that if the class
// loader data is for a non-strong hidden class then it may
// get freed by a GC even if its class loader is one of these loaders.
bool ClassLoaderData::is_builtin_class_loader_data() const {
  return (is_boot_class_loader_data() ||
          SystemDictionary::is_system_class_loader(class_loader()) ||
          SystemDictionary::is_platform_class_loader(class_loader()));
}

// Returns true if this class loader data is a class loader data
// that is not ever freed by a GC. It must be the CLD for one of the builtin
// class loaders and not the CLD for a non-strong hidden class.
bool ClassLoaderData::is_permanent_class_loader_data() const {
  return is_builtin_class_loader_data() && !has_class_mirror_holder();
}

ClassLoaderMetaspace* ClassLoaderData::metaspace_non_null() {
  // If the metaspace has not been allocated, create a new one. Might want
  // to create smaller arena for Reflection class loaders also.
  // The reason for the delayed allocation is because some class loaders are
  // simply for delegating with no metadata of their own.
  // Lock-free access requires load_acquire.
  ClassLoaderMetaspace* metaspace = Atomic::load_acquire(&_metaspace);
  if (metaspace == nullptr) {
    MutexLocker ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
    // Check if _metaspace got allocated while we were waiting for this lock.
    if ((metaspace = _metaspace) == nullptr) {
      if (this == the_null_class_loader_data()) {
        assert (class_loader() == nullptr, "Must be");
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
      } else if (has_class_mirror_holder()) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ClassMirrorHolderMetaspaceType);
      } else if (class_loader()->is_a(vmClasses::reflect_DelegatingClassLoader_klass())) {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
      } else {
        metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
      }
      // Ensure _metaspace is stable, since it is examined without a lock
      Atomic::release_store(&_metaspace, metaspace);
    }
  }
  return metaspace;
}

OopHandle ClassLoaderData::add_handle(Handle h) {
  MutexLocker ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  record_modified_oops();
  return _handles.add(h());
}

void ClassLoaderData::remove_handle(OopHandle h) {
  assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
  oop* ptr = h.ptr_raw();
  if (ptr != nullptr) {
    assert(_handles.owner_of(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
    NativeAccess<>::oop_store(ptr, oop(nullptr));
  }
}

void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
  MutexLocker ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
  if (dest.resolve() != nullptr) {
    return;
  } else {
    record_modified_oops();
    dest = _handles.add(h());
  }
}

// Add this metadata pointer to be freed when it's safe. This is only during
// a safepoint which checks if handles point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
  // Metadata in shared region isn't deleted.
  if (!m->is_shared()) {
    MutexLocker ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
    if (_deallocate_list == nullptr) {
      _deallocate_list = new (mtClass) GrowableArray<Metadata*>(100, mtClass);
    }
    _deallocate_list->append_if_missing(m);
    ResourceMark rm;
    log_debug(class, loader, data)("deallocate added for %s", m->print_value_string());
    ClassLoaderDataGraph::set_should_clean_deallocate_lists();
  }
}

// Deallocate free metadata on the free list. How useful the PermGen was!
void ClassLoaderData::free_deallocate_list() {
  // This must be called at a safepoint because it depends on metadata walking at
  // safepoint cleanup time.
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == nullptr) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
      // Some cleaning remains.
      ClassLoaderDataGraph::set_should_clean_deallocate_lists();
    }
  }
}
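
// Added commentary (an informal summary, not new behavior): entries typically land on the
// deallocate list when metadata is replaced while its class stays alive, for example a
// scratch class, method, or constant pool left over from class redefinition or from an
// aborted class load. Freeing is deferred to a safepoint (free_deallocate_list above)
// because running code may still reference the old metadata from a stack frame, which is
// what the on_stack() check guards against.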

// This is distinct from free_deallocate_list. For class loader data that are
// unloading, this frees the C heap memory for items on the list, and unlinks
// scratch or error classes so that unloading events aren't triggered for these
// classes. The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::free_deallocate_list_C_heap_structures() {
  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == nullptr) {
    return;
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      ik->release_C_heap_structures();
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
      // But still have to remove it from the dumptime_table.
      SystemDictionaryShared::handle_class_unloading(ik);
    }
  }
}

// Caller needs ResourceMark
// If the class loader's _name has not been explicitly set, the class loader's
// qualified class name is returned.
const char* ClassLoaderData::loader_name() const {
  if (_class_loader_klass == nullptr) {
    return BOOTSTRAP_LOADER_NAME;
  } else if (_name != nullptr) {
    return _name->as_C_string();
  } else {
    return _class_loader_klass->external_name();
  }
}

// Caller needs ResourceMark
// Format of the _name_and_id is as follows:
//   If the defining loader has a name explicitly set then '<loader-name>' @<id>
//   If the defining loader has no name then <qualified-class-name> @<id>
//   If built-in loader, then omit '@<id>' as there is only one instance.
const char* ClassLoaderData::loader_name_and_id() const {
  if (_class_loader_klass == nullptr) {
    return "'" BOOTSTRAP_LOADER_NAME "'";
  } else if (_name_and_id != nullptr) {
    return _name_and_id->as_C_string();
  } else {
    // May be called in a race before _name_and_id is initialized.
    return _class_loader_klass->external_name();
  }
}

void ClassLoaderData::print_value_on(outputStream* out) const {
  if (!is_unloading() && class_loader() != nullptr) {
    out->print("loader data: " INTPTR_FORMAT " for instance ", p2i(this));
    class_loader()->print_value_on(out);  // includes loader_name_and_id() and address of class loader instance
  } else {
    // loader data: 0xsomeaddr of 'bootstrap'
    out->print("loader data: " INTPTR_FORMAT " of %s", p2i(this), loader_name_and_id());
  }
  if (_has_class_mirror_holder) {
    out->print(" has a class holder");
  }
}

void ClassLoaderData::print_value() const { print_value_on(tty); }

#ifndef PRODUCT
class PrintKlassClosure: public KlassClosure {
  outputStream* _out;
 public:
  PrintKlassClosure(outputStream* out): _out(out) { }

  void do_klass(Klass* k) {
    ResourceMark rm;
    _out->print("%s,", k->external_name());
  }
};

void ClassLoaderData::print_on(outputStream* out) const {
  ResourceMark rm;
  out->print_cr("ClassLoaderData(" INTPTR_FORMAT ")", p2i(this));
  out->print_cr(" - name %s", loader_name_and_id());
  if (!_holder.is_null()) {
    out->print   (" - holder ");
    _holder.print_on(out);
    out->print_cr("");
  }
  out->print_cr(" - class loader " INTPTR_FORMAT, p2i(_class_loader.ptr_raw()));
  out->print_cr(" - metaspace " INTPTR_FORMAT, p2i(_metaspace));
  out->print_cr(" - unloading %s", _unloading ? "true" : "false");
  out->print_cr(" - class mirror holder %s", _has_class_mirror_holder ? "true" : "false");
  out->print_cr(" - modified oops %s", _modified_oops ? "true" : "false");
  out->print_cr(" - keep alive %d", _keep_alive);
  out->print   (" - claim ");
  switch(_claim) {
    case _claim_none:                       out->print_cr("none"); break;
    case _claim_finalizable:                out->print_cr("finalizable"); break;
    case _claim_strong:                     out->print_cr("strong"); break;
    case _claim_stw_fullgc_mark:            out->print_cr("stw full gc mark"); break;
    case _claim_stw_fullgc_adjust:          out->print_cr("stw full gc adjust"); break;
    case _claim_other:                      out->print_cr("other"); break;
    case _claim_other | _claim_finalizable: out->print_cr("other and finalizable"); break;
    case _claim_other | _claim_strong:      out->print_cr("other and strong"); break;
    default:                                ShouldNotReachHere();
  }
  out->print_cr(" - handles %d", _handles.count());
  out->print_cr(" - dependency count %d", _dependency_count);
  out->print   (" - klasses { ");
  if (Verbose) {
    PrintKlassClosure closure(out);
    ((ClassLoaderData*)this)->classes_do(&closure);
  } else {
    out->print("...");
  }
  out->print_cr(" }");
  out->print_cr(" - packages " INTPTR_FORMAT, p2i(_packages));
  out->print_cr(" - module " INTPTR_FORMAT, p2i(_modules));
  out->print_cr(" - unnamed module " INTPTR_FORMAT, p2i(_unnamed_module));
  if (_dictionary != nullptr) {
    out->print   (" - dictionary " INTPTR_FORMAT " ", p2i(_dictionary));
    _dictionary->print_size(out);
  } else {
    out->print_cr(" - dictionary " INTPTR_FORMAT, p2i(_dictionary));
  }
  if (_jmethod_ids != nullptr) {
    out->print   (" - jmethod count ");
    Method::print_jmethod_ids_count(this, out);
    out->print_cr("");
  }
  out->print_cr(" - deallocate list " INTPTR_FORMAT, p2i(_deallocate_list));
  out->print_cr(" - next CLD " INTPTR_FORMAT, p2i(_next));
}
#endif // PRODUCT

void ClassLoaderData::print() const { print_on(tty); }

class VerifyHandleOops : public OopClosure {
  VerifyOopClosure vc;
 public:
  virtual void do_oop(oop* p) {
    if (p != nullptr && *p != nullptr) {
      oop o = *p;
      if (!java_lang_Class::is_instance(o)) {
        // is_instance will assert for an invalid oop.
        // Walk the resolved_references array and other assorted oops in the
        // CLD::_handles field. The mirror oops are followed by other heap roots.
        o->oop_iterate(&vc);
      }
    }
  }
  virtual void do_oop(narrowOop* o) { ShouldNotReachHere(); }
};

void ClassLoaderData::verify() {
  assert_locked_or_safepoint(_metaspace_lock);
  oop cl = class_loader();

  guarantee(this == class_loader_data(cl) || has_class_mirror_holder(), "Must be the same");
  guarantee(cl != nullptr || this == ClassLoaderData::the_null_class_loader_data() || has_class_mirror_holder(), "must be");

  // Verify the integrity of the allocated space.
#ifdef ASSERT
  if (metaspace_or_null() != nullptr) {
    metaspace_or_null()->verify();
  }
#endif

  for (Klass* k = _klasses; k != nullptr; k = k->next_link()) {
    guarantee(k->class_loader_data() == this, "Must be the same");
    k->verify();
    assert(k != k->next_link(), "no loops!");
  }

  if (_modules != nullptr) {
    _modules->verify();
  }

  if (_deallocate_list != nullptr) {
    for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
      Metadata* m = _deallocate_list->at(i);
      if (m->is_klass()) {
        ((Klass*)m)->verify();
      }
    }
  }

  // Check the oops in the handles area
  VerifyHandleOops vho;
  oops_do(&vho, _claim_none, false);
}

bool ClassLoaderData::contains_klass(Klass* klass) {
  // Lock-free access requires load_acquire
  for (Klass* k = Atomic::load_acquire(&_klasses); k != nullptr; k = k->next_link()) {
    if (k == klass) return true;
  }
  return false;
}