/*
 * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "cds/archiveUtils.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/cdsEnumKlass.hpp"
#include "cds/classListWriter.hpp"
#include "cds/heapShared.hpp"
#include "cds/metaspaceShared.hpp"
#include "classfile/classFileParser.hpp"
#include "classfile/classFileStream.hpp"
#include "classfile/classLoader.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "classfile/verifier.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "code/dependencyContext.hpp"
#include "compiler/compilationPolicy.hpp"
#include "compiler/compileBroker.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "interpreter/bytecodeStream.hpp"
#include "interpreter/oopMapCache.hpp"
#include "interpreter/rewriter.hpp"
#include "jvm.h"
#include "jvmtifiles/jvmti.h"
#include "logging/log.hpp"
#include "logging/logMessage.hpp"
#include "logging/logStream.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/constantPool.hpp"
#include "oops/fieldStreams.inline.hpp"
#include "oops/instanceClassLoaderKlass.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/instanceOop.hpp"
#include "oops/instanceStackChunkKlass.hpp"
#include "oops/klass.inline.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "oops/recordComponent.hpp"
#include "oops/symbol.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiRedefineClasses.hpp"
#include "prims/jvmtiThreadState.hpp"
#include "prims/methodComparator.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomic.hpp"
#include "runtime/deoptimization.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/os.inline.hpp"
#include "runtime/reflection.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/threads.hpp"
#include "services/classLoadingService.hpp"
#include "services/finalizerService.hpp"
#include "services/threadService.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/events.hpp"
#include "utilities/macros.hpp"
#include "utilities/pair.hpp"
#include "utilities/stringUtils.hpp"
#ifdef COMPILER1
#include "c1/c1_Compiler.hpp"
#endif
#if INCLUDE_JFR
#include "jfr/jfrEvents.hpp"
#endif

#ifdef DTRACE_ENABLED


#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
#define DTRACE_CLASSINIT_PROBE(type, thread_type)                \
  {                                                              \
    char* data = nullptr;                                        \
    int len = 0;                                                 \
    Symbol* clss_name = name();                                  \
    if (clss_name != nullptr) {                                  \
      data = (char*)clss_name->bytes();                          \
      len = clss_name->utf8_length();                            \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (void*)class_loader(), thread_type);            \
  }

#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)     \
  {                                                              \
    char* data = nullptr;                                        \
    int len = 0;                                                 \
    Symbol* clss_name = name();                                  \
    if (clss_name != nullptr) {                                  \
      data = (char*)clss_name->bytes();                          \
      len = clss_name->utf8_length();                            \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (void*)class_loader(), thread_type, wait);      \
  }

#else //  ndef DTRACE_ENABLED

#define DTRACE_CLASSINIT_PROBE(type, thread_type)
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)

#endif //  ndef DTRACE_ENABLED

bool InstanceKlass::_finalization_enabled = true;

static inline bool is_class_loader(const Symbol* class_name,
                                   const ClassFileParser& parser) {
  assert(class_name != nullptr, "invariant");

  if (class_name == vmSymbols::java_lang_ClassLoader()) {
    return true;
  }

  if (vmClasses::ClassLoader_klass_loaded()) {
    const Klass* const super_klass = parser.super_klass();
    if (super_klass != nullptr) {
      if (super_klass->is_subtype_of(vmClasses::ClassLoader_klass())) {
        return true;
      }
    }
  }
  return false;
}

static inline bool is_stack_chunk_class(const Symbol* class_name,
                                        const ClassLoaderData* loader_data) {
  return (class_name == vmSymbols::jdk_internal_vm_StackChunk() &&
          loader_data->is_the_null_class_loader_data());
}

// private: called to verify that k is a static member of this nest.
// We know that k is an instance class in the same package and hence the
// same classloader.
bool InstanceKlass::has_nest_member(JavaThread* current, InstanceKlass* k) const {
  assert(!is_hidden(), "unexpected hidden class");
  if (_nest_members == nullptr || _nest_members == Universe::the_empty_short_array()) {
    if (log_is_enabled(Trace, class, nestmates)) {
      ResourceMark rm(current);
      log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
                                  k->external_name(), this->external_name());
    }
    return false;
  }

  if (log_is_enabled(Trace, class, nestmates)) {
    ResourceMark rm(current);
    log_trace(class, nestmates)("Checking nest membership of %s in %s",
                                k->external_name(), this->external_name());
  }

  // Check for the named class in _nest_members.
  // We don't resolve, or load, any classes.
  for (int i = 0; i < _nest_members->length(); i++) {
    int cp_index = _nest_members->at(i);
    Symbol* name = _constants->klass_name_at(cp_index);
    if (name == k->name()) {
      log_trace(class, nestmates)("- named class found at nest_members[%d] => cp[%d]", i, cp_index);
      return true;
    }
  }
  log_trace(class, nestmates)("- class is NOT a nest member!");
  return false;
}

// Called to verify that k is a permitted subclass of this class
bool InstanceKlass::has_as_permitted_subclass(const InstanceKlass* k) const {
  Thread* current = Thread::current();
  assert(k != nullptr, "sanity check");
  assert(_permitted_subclasses != nullptr && _permitted_subclasses != Universe::the_empty_short_array(),
         "unexpected empty _permitted_subclasses array");

  if (log_is_enabled(Trace, class, sealed)) {
    ResourceMark rm(current);
    log_trace(class, sealed)("Checking for permitted subclass of %s in %s",
                             k->external_name(), this->external_name());
  }

  // Check that the class and its super are in the same module.
  if (k->module() != this->module()) {
    ResourceMark rm(current);
    log_trace(class, sealed)("Check failed for same module of permitted subclass %s and sealed class %s",
                             k->external_name(), this->external_name());
    return false;
  }

  if (!k->is_public() && !is_same_class_package(k)) {
    ResourceMark rm(current);
    log_trace(class, sealed)("Check failed, subclass %s not public and not in the same package as sealed class %s",
                             k->external_name(), this->external_name());
    return false;
  }

  for (int i = 0; i < _permitted_subclasses->length(); i++) {
    int cp_index = _permitted_subclasses->at(i);
    Symbol* name = _constants->klass_name_at(cp_index);
    if (name == k->name()) {
      log_trace(class, sealed)("- Found it at permitted_subclasses[%d] => cp[%d]", i, cp_index);
      return true;
    }
  }
  log_trace(class, sealed)("- class is NOT a permitted subclass!");
  return false;
}

// Return nest-host class, resolving, validating and saving it if needed.
// In cases where this is called from a thread that cannot do classloading
// (such as a native JIT thread) then we simply return null, which in turn
// causes the access check to return false. Such code will retry the access
// from a more suitable environment later. Otherwise the _nest_host is always
// set once this method returns.
// Any errors from nest-host resolution must be preserved so they can be queried
// from higher-level access checking code, and reported as part of access checking
// exceptions.
// VirtualMachineErrors are propagated with a null return.
// Under any conditions where the _nest_host can be set to non-null the resulting
// value of it and, if applicable, the nest host resolution/validation error,
// are idempotent.
InstanceKlass* InstanceKlass::nest_host(TRAPS) {
  InstanceKlass* nest_host_k = _nest_host;
  if (nest_host_k != nullptr) {
    return nest_host_k;
  }

  ResourceMark rm(THREAD);

  // need to resolve and save our nest-host class.
  if (_nest_host_index != 0) { // we have a real nest_host
    // Before trying to resolve check if we're in a suitable context
    bool can_resolve = THREAD->can_call_java();
    if (!can_resolve && !_constants->tag_at(_nest_host_index).is_klass()) {
      log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
                                  this->external_name());
      return nullptr; // sentinel to say "try again from a different context"
    }

    log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
                                this->external_name(),
                                _constants->klass_name_at(_nest_host_index)->as_C_string());

    Klass* k = _constants->klass_at(_nest_host_index, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      if (PENDING_EXCEPTION->is_a(vmClasses::VirtualMachineError_klass())) {
        return nullptr; // propagate VMEs
      }
      stringStream ss;
      char* target_host_class = _constants->klass_name_at(_nest_host_index)->as_C_string();
      ss.print("Nest host resolution of %s with host %s failed: ",
               this->external_name(), target_host_class);
      java_lang_Throwable::print(PENDING_EXCEPTION, &ss);
      const char* msg = ss.as_string(true /* on C-heap */);
      constantPoolHandle cph(THREAD, constants());
      SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
      CLEAR_PENDING_EXCEPTION;

      log_trace(class, nestmates)("%s", msg);
    } else {
      // A valid nest-host is an instance class in the current package that lists this
      // class as a nest member. If any of these conditions are not met the class is
      // its own nest-host.
      const char* error = nullptr;

      // JVMS 5.4.4 indicates package check comes first
      if (is_same_class_package(k)) {
        // Now check actual membership. We can't be a member if our "host" is
        // not an instance class.
        if (k->is_instance_klass()) {
          nest_host_k = InstanceKlass::cast(k);
          bool is_member = nest_host_k->has_nest_member(THREAD, this);
          if (is_member) {
            _nest_host = nest_host_k; // save resolved nest-host value

            log_trace(class, nestmates)("Resolved nest-host of %s to %s",
                                        this->external_name(), k->external_name());
            return nest_host_k;
          } else {
            error = "current type is not listed as a nest member";
          }
        } else {
          error = "host is not an instance class";
        }
      } else {
        error = "types are in different packages";
      }

      // something went wrong, so record what and log it
      {
        stringStream ss;
        ss.print("Type %s (loader: %s) is not a nest member of type %s (loader: %s): %s",
                 this->external_name(),
                 this->class_loader_data()->loader_name_and_id(),
                 k->external_name(),
                 k->class_loader_data()->loader_name_and_id(),
                 error);
        const char* msg = ss.as_string(true /* on C-heap */);
        constantPoolHandle cph(THREAD, constants());
        SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
        log_trace(class, nestmates)("%s", msg);
      }
    }
  } else {
    log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
                                this->external_name());
  }

  // Either not in an explicit nest, or else an error occurred, so
  // the nest-host is set to `this`. Any thread that sees this assignment
  // will also see any setting of nest_host_error(), if applicable.
  return (_nest_host = this);
}

// Dynamic nest member support: set this class's nest host to the given class.
// This occurs as part of the class definition, as soon as the instanceKlass
// has been created and doesn't require further resolution. The code:
//    lookup().defineHiddenClass(bytes_for_X, NESTMATE);
// results in:
//    class_of_X.set_nest_host(lookup().lookupClass().getNestHost())
// If it has an explicit _nest_host_index or _nest_members, these will be ignored.
// We also know the "host" is a valid nest-host in the same package so we can
// assert some of those facts.
void InstanceKlass::set_nest_host(InstanceKlass* host) {
  assert(is_hidden(), "must be a hidden class");
  assert(host != nullptr, "null nest host specified");
  assert(_nest_host == nullptr, "current class has resolved nest-host");
  assert(nest_host_error() == nullptr, "unexpected nest host resolution error exists: %s",
         nest_host_error());
  assert((host->_nest_host == nullptr && host->_nest_host_index == 0) ||
         (host->_nest_host == host), "proposed host is not a valid nest-host");
  // Can't assert this as package is not set yet:
  // assert(is_same_class_package(host), "proposed host is in wrong package");

  if (log_is_enabled(Trace, class, nestmates)) {
    ResourceMark rm;
    const char* msg = "";
    // a hidden class does not expect a statically defined nest-host
    if (_nest_host_index > 0) {
      msg = "(the NestHost attribute in the current class is ignored)";
    } else if (_nest_members != nullptr && _nest_members != Universe::the_empty_short_array()) {
      msg = "(the NestMembers attribute in the current class is ignored)";
    }
    log_trace(class, nestmates)("Injected type %s into the nest of %s %s",
                                this->external_name(),
                                host->external_name(),
                                msg);
  }
  // set dynamic nest host
  _nest_host = host;
  // Record dependency to keep nest host from being unloaded before this class.
  ClassLoaderData* this_key = class_loader_data();
  assert(this_key != nullptr, "sanity");
  this_key->record_dependency(host);
}

// check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
// or we are k's nest_host - all of which is covered by comparing the two
// resolved_nest_hosts.
// Any exceptions (i.e. VMEs) are propagated.
bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {

  assert(this != k, "this should be handled by higher-level code");

  // Per JVMS 5.4.4 we first resolve and validate the current class, then
  // the target class k.

  InstanceKlass* cur_host = nest_host(CHECK_false);
  if (cur_host == nullptr) {
    return false;
  }

  Klass* k_nest_host = k->nest_host(CHECK_false);
  if (k_nest_host == nullptr) {
    return false;
  }

  bool access = (cur_host == k_nest_host);

  ResourceMark rm(THREAD);
  log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
                              this->external_name(),
                              access ? "" : "NOT ",
                              k->external_name());
  return access;
}

const char* InstanceKlass::nest_host_error() {
  if (_nest_host_index == 0) {
    return nullptr;
  } else {
    constantPoolHandle cph(Thread::current(), constants());
    return SystemDictionary::find_nest_host_error(cph, (int)_nest_host_index);
  }
}

void* InstanceKlass::operator new(size_t size, ClassLoaderData* loader_data, size_t word_size,
                                  bool use_class_space, TRAPS) throw() {
  return Metaspace::allocate(loader_data, word_size, ClassType, use_class_space, THREAD);
}

InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
  const int size = InstanceKlass::size(parser.vtable_size(),
                                       parser.itable_size(),
                                       nonstatic_oop_map_size(parser.total_oop_map_count()),
                                       parser.is_interface());

  const Symbol* const class_name = parser.class_name();
  assert(class_name != nullptr, "invariant");
  ClassLoaderData* loader_data = parser.loader_data();
  assert(loader_data != nullptr, "invariant");

  InstanceKlass* ik;
  const bool use_class_space = !parser.is_interface() && !parser.is_abstract();

  // Allocation
  if (parser.is_instance_ref_klass()) {
    // java.lang.ref.Reference
    ik = new (loader_data, size, use_class_space, THREAD) InstanceRefKlass(parser);
  } else if (class_name == vmSymbols::java_lang_Class()) {
    // mirror - java.lang.Class
    ik = new (loader_data, size, use_class_space, THREAD) InstanceMirrorKlass(parser);
  } else if (is_stack_chunk_class(class_name, loader_data)) {
    // stack chunk
    ik = new (loader_data, size, use_class_space, THREAD) InstanceStackChunkKlass(parser);
  } else if (is_class_loader(class_name, parser)) {
    // class loader - java.lang.ClassLoader
    ik = new (loader_data, size, use_class_space, THREAD) InstanceClassLoaderKlass(parser);
  } else {
    // normal
    ik = new (loader_data, size, use_class_space, THREAD) InstanceKlass(parser);
  }

  // Check for pending exception before adding to the loader data and incrementing
  // class count.  Can get OOM here.
  if (HAS_PENDING_EXCEPTION) {
    return nullptr;
  }

  return ik;
}


// copy method ordering from resource area to Metaspace
void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
  if (m != nullptr) {
    // allocate a new array and copy contents (memcpy?)
    _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
    for (int i = 0; i < m->length(); i++) {
      _method_ordering->at_put(i, m->at(i));
    }
  } else {
    _method_ordering = Universe::the_empty_int_array();
  }
}

// create a new array of vtable_indices for default methods
Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
  Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
  assert(default_vtable_indices() == nullptr, "only create once");
  set_default_vtable_indices(vtable_indices);
  return vtable_indices;
}


InstanceKlass::InstanceKlass() {
  assert(CDSConfig::is_dumping_static_archive() || CDSConfig::is_using_archive(), "only for CDS");
}

InstanceKlass::InstanceKlass(const ClassFileParser& parser, KlassKind kind, ReferenceType reference_type) :
  Klass(kind),
  _nest_members(nullptr),
  _nest_host(nullptr),
  _permitted_subclasses(nullptr),
  _record_components(nullptr),
  _static_field_size(parser.static_field_size()),
  _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
  _itable_len(parser.itable_size()),
  _nest_host_index(0),
  _init_state(allocated),
  _reference_type(reference_type),
  _init_thread(nullptr)
{
  set_vtable_length(parser.vtable_size());
  set_access_flags(parser.access_flags());
  if (parser.is_hidden()) set_is_hidden();
  set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
                                                  false));

  assert(nullptr == _methods, "underlying memory not zeroed?");
  assert(is_instance_klass(), "is layout incorrect?");
  assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
}

void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
                                       Array<Method*>* methods) {
  if (methods != nullptr && methods != Universe::the_empty_method_array() &&
      !methods->is_shared()) {
    for (int i = 0; i < methods->length(); i++) {
      Method* method = methods->at(i);
      if (method == nullptr) continue;  // maybe null if error processing
      // Only want to delete methods that are not executing for RedefineClasses.
      // The previous version will point to them so they're not totally dangling
      assert (!method->on_stack(), "shouldn't be called with methods on stack");
      MetadataFactory::free_metadata(loader_data, method);
    }
    MetadataFactory::free_array<Method*>(loader_data, methods);
  }
}

void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
                                          const Klass* super_klass,
                                          Array<InstanceKlass*>* local_interfaces,
                                          Array<InstanceKlass*>* transitive_interfaces) {
  // Only deallocate transitive interfaces if not empty, same as super class
  // or same as local interfaces.  See code in parseClassFile.
  Array<InstanceKlass*>* ti = transitive_interfaces;
  if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
    // check that the interfaces don't come from super class
    Array<InstanceKlass*>* sti = (super_klass == nullptr) ? nullptr :
                                 InstanceKlass::cast(super_klass)->transitive_interfaces();
    if (ti != sti && ti != nullptr && !ti->is_shared()) {
      MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
    }
  }

  // local interfaces can be empty
  if (local_interfaces != Universe::the_empty_instance_klass_array() &&
      local_interfaces != nullptr && !local_interfaces->is_shared()) {
    MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
  }
}

void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
                                                 Array<RecordComponent*>* record_components) {
  if (record_components != nullptr && !record_components->is_shared()) {
    for (int i = 0; i < record_components->length(); i++) {
      RecordComponent* record_component = record_components->at(i);
      MetadataFactory::free_metadata(loader_data, record_component);
    }
    MetadataFactory::free_array<RecordComponent*>(loader_data, record_components);
  }
}

// This function deallocates the metadata and C heap pointers that the
// InstanceKlass points to.
void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
  // Orphan the mirror first, CMS thinks it's still live.
  if (java_mirror() != nullptr) {
    java_lang_Class::set_klass(java_mirror(), nullptr);
  }

  // Also remove mirror from handles
  loader_data->remove_handle(_java_mirror);

  // Need to take this class off the class loader data list.
  loader_data->remove_class(this);

  // The array_klass for this class is created later, after error handling.
  // For class redefinition, we keep the original class so this scratch class
  // doesn't have an array class.  Either way, assert that there is nothing
  // to deallocate.
  assert(array_klasses() == nullptr, "array classes shouldn't be created for this class yet");

  // Release C heap allocated data that this points to, which includes
  // reference counting symbol names.
  // Can't release the constant pool or MethodData C heap data here because the constant
  // pool can be deallocated separately from the InstanceKlass for default methods and
  // redefine classes.  MethodData can also be released separately.
  release_C_heap_structures(/* release_sub_metadata */ false);

  deallocate_methods(loader_data, methods());
  set_methods(nullptr);

  deallocate_record_components(loader_data, record_components());
  set_record_components(nullptr);

  if (method_ordering() != nullptr &&
      method_ordering() != Universe::the_empty_int_array() &&
      !method_ordering()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, method_ordering());
  }
  set_method_ordering(nullptr);

  // default methods can be empty
  if (default_methods() != nullptr &&
      default_methods() != Universe::the_empty_method_array() &&
      !default_methods()->is_shared()) {
    MetadataFactory::free_array<Method*>(loader_data, default_methods());
  }
  // Do NOT deallocate the default methods, they are owned by superinterfaces.
  set_default_methods(nullptr);

  // default methods vtable indices can be empty
  if (default_vtable_indices() != nullptr &&
      !default_vtable_indices()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
  }
  set_default_vtable_indices(nullptr);


  // This array is in Klass, but remove it with the InstanceKlass since
  // this place would be the only caller and it can share memory with transitive
  // interfaces.
  if (secondary_supers() != nullptr &&
      secondary_supers() != Universe::the_empty_klass_array() &&
      // see comments in compute_secondary_supers about the following cast
      (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
      !secondary_supers()->is_shared()) {
    MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
  }
  set_secondary_supers(nullptr, SECONDARY_SUPERS_BITMAP_EMPTY);

  deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
  set_transitive_interfaces(nullptr);
  set_local_interfaces(nullptr);

  if (fieldinfo_stream() != nullptr && !fieldinfo_stream()->is_shared()) {
    MetadataFactory::free_array<u1>(loader_data, fieldinfo_stream());
  }
  set_fieldinfo_stream(nullptr);

  if (fields_status() != nullptr && !fields_status()->is_shared()) {
    MetadataFactory::free_array<FieldStatus>(loader_data, fields_status());
  }
  set_fields_status(nullptr);

  // If a method from a redefined class is using this constant pool, don't
  // delete it, yet.  The new class's previous version will point to this.
  if (constants() != nullptr) {
    assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
    if (!constants()->is_shared()) {
      MetadataFactory::free_metadata(loader_data, constants());
    }
    // Delete any cached resolution errors for the constant pool
    SystemDictionary::delete_resolution_error(constants());

    set_constants(nullptr);
  }

  if (inner_classes() != nullptr &&
      inner_classes() != Universe::the_empty_short_array() &&
      !inner_classes()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, inner_classes());
  }
  set_inner_classes(nullptr);

  if (nest_members() != nullptr &&
      nest_members() != Universe::the_empty_short_array() &&
      !nest_members()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, nest_members());
  }
  set_nest_members(nullptr);

  if (permitted_subclasses() != nullptr &&
      permitted_subclasses() != Universe::the_empty_short_array() &&
      !permitted_subclasses()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
  }
  set_permitted_subclasses(nullptr);

  // We should deallocate the Annotations instance if it's not in shared spaces.
  if (annotations() != nullptr && !annotations()->is_shared()) {
    MetadataFactory::free_metadata(loader_data, annotations());
  }
  set_annotations(nullptr);

  SystemDictionaryShared::handle_class_unloading(this);

#if INCLUDE_CDS_JAVA_HEAP
  if (CDSConfig::is_dumping_heap()) {
    HeapShared::remove_scratch_objects(this);
  }
#endif
}

bool InstanceKlass::is_record() const {
  return _record_components != nullptr &&
         is_final() &&
         java_super() == vmClasses::Record_klass();
}

bool InstanceKlass::is_sealed() const {
  return _permitted_subclasses != nullptr &&
         _permitted_subclasses != Universe::the_empty_short_array();
}

bool InstanceKlass::should_be_initialized() const {
  return !is_initialized();
}

klassItable InstanceKlass::itable() const {
  return klassItable(const_cast<InstanceKlass*>(this));
}

// JVMTI spec thinks there are signers and protection domain in the
// instanceKlass.  These accessors pretend these fields are there.
// The hprof specification also thinks these fields are in InstanceKlass.
oop InstanceKlass::protection_domain() const {
  // return the protection_domain from the mirror
  return java_lang_Class::protection_domain(java_mirror());
}

objArrayOop InstanceKlass::signers() const {
  // return the signers from the mirror
  return java_lang_Class::signers(java_mirror());
}

oop InstanceKlass::init_lock() const {
  // return the init lock from the mirror
  oop lock = java_lang_Class::init_lock(java_mirror());
  // Prevent reordering with any access of initialization state
  OrderAccess::loadload();
  assert(lock != nullptr || !is_not_initialized(), // initialized or in_error state
         "only fully initialized state can have a null lock");
  return lock;
}

// Set the initialization lock to null so the object can be GC'ed.  Any racing
// threads to get this lock will see a null lock and will not lock.
// That's okay because they all check for initialized state after getting
// the lock and return.
void InstanceKlass::fence_and_clear_init_lock() {
  // make sure previous stores are all done, notably the init_state.
  OrderAccess::storestore();
  java_lang_Class::clear_init_lock(java_mirror());
  assert(!is_not_initialized(), "class must be initialized now");
}


// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process. The step comments refer to the procedure described in that section.
// Note: implementation moved to static method to expose the this pointer.
void InstanceKlass::initialize(TRAPS) {
  if (this->should_be_initialized()) {
    initialize_impl(CHECK);
    // Note: at this point the class may be initialized
    //       OR it may be in the state of being initialized
    //       in case of recursive initialization!
  } else {
    assert(is_initialized(), "sanity check");
  }
}


bool InstanceKlass::verify_code(TRAPS) {
  // 1) Verify the bytecodes
  return Verifier::verify(this, should_verify_class(), THREAD);
}

void InstanceKlass::link_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    link_class_impl(CHECK);
  }
}

// Called to verify that a class can link during initialization, without
// throwing a VerifyError.
bool InstanceKlass::link_class_or_fail(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    link_class_impl(CHECK_false);
  }
  return is_linked();
}

bool InstanceKlass::link_class_impl(TRAPS) {
  if (CDSConfig::is_dumping_static_archive() && SystemDictionaryShared::has_class_failed_verification(this)) {
    // This is for CDS static dump only -- we use the in_error_state to indicate that
    // the class has failed verification. Throwing the NoClassDefFoundError here is just
    // a convenient way to stop repeat attempts to verify the same (bad) class.
    //
    // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
    // if we are executing Java code. This is not a problem for CDS dumping phase since
    // it doesn't execute any Java code.
    ResourceMark rm(THREAD);
    Exceptions::fthrow(THREAD_AND_LOCATION,
                       vmSymbols::java_lang_NoClassDefFoundError(),
                       "Class %s, or one of its supertypes, failed class initialization",
                       external_name());
    return false;
  }
  // return if already verified
  if (is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  JavaThread* jt = THREAD;

  // link super class before linking this class
  Klass* super_klass = super();
  if (super_klass != nullptr) {
    if (super_klass->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        external_name(),
        super_klass->external_name()
      );
      return false;
    }

    InstanceKlass* ik_super = InstanceKlass::cast(super_klass);
    ik_super->link_class_impl(CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  Array<InstanceKlass*>* interfaces = local_interfaces();
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    InstanceKlass* interk = interfaces->at(index);
    interk->link_class_impl(CHECK_false);
  }

  // in case the class is linked in the process of linking its superclasses
  if (is_linked()) {
    return true;
  }

  // trace only the link time for this klass that includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    HandleMark hm(THREAD);
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, jt);
    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten
    //

    if (!is_linked()) {
      if (!is_rewritten()) {
        if (is_shared()) {
          assert(!verified_at_dump_time(), "must be");
        }
        {
          bool verify_ok = verify_code(THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (is_linked()) {
          return true;
        }

        // also sets rewritten
        rewrite_class(CHECK_false);
      } else if (is_shared()) {
        SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
      }

      // relocate jsrs and link methods after they are all rewritten
      link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new Method*s.
      // also does loader constraint checking
      //
      // initialize_vtable and initialize_itable need to be rerun
      // for a shared class if
      // 1) the class is loaded by custom class loader or
      // 2) the class is loaded by built-in class loader but failed to add archived loader constraints or
      // 3) the class was not verified during dump time
      bool need_init_table = true;
      if (is_shared() && verified_at_dump_time() &&
          SystemDictionaryShared::check_linking_constraints(THREAD, this)) {
        need_init_table = false;
      }
      if (need_init_table) {
        vtable().initialize_vtable_and_check_constraints(CHECK_false);
        itable().initialize_itable_and_check_constraints(CHECK_false);
      }
#ifdef ASSERT
      vtable().verify(tty, true);
      // In case itable verification is ever added.
      // itable().verify(tty, true);
#endif
      if (Universe::is_fully_initialized()) {
        DeoptimizationScope deopt_scope;
        {
          // Now mark all code that assumes the class is not linked.
          // Set state under the Compile_lock also.
          MutexLocker ml(THREAD, Compile_lock);

          set_init_state(linked);
          CodeCache::mark_dependents_on(&deopt_scope, this);
        }
        // Perform the deopt handshake outside Compile_lock.
        deopt_scope.deoptimize_marked();
      } else {
        set_init_state(linked);
      }
      if (JvmtiExport::should_post_class_prepare()) {
        JvmtiExport::post_class_prepare(THREAD, this);
      }
    }
  }
  return true;
}

// Rewrite the byte codes of all of the methods of a class.
// The rewriter must be called exactly once. Rewriting must happen after
// verification but before the first method of the class is executed.
void InstanceKlass::rewrite_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (is_rewritten()) {
    assert(is_shared(), "rewriting an unshared class?");
    return;
  }
  Rewriter::rewrite(this, CHECK);
  set_rewritten();
}

// Now relocate and link method entry points after class is rewritten.
// This is outside is_rewritten flag. In case of an exception, it can be
// executed more than once.
void InstanceKlass::link_methods(TRAPS) {
  PerfTraceTime timer(ClassLoader::perf_ik_link_methods_time());

  int len = methods()->length();
  for (int i = len-1; i >= 0; i--) {
    methodHandle m(THREAD, methods()->at(i));

    // Set up method entry points for compiler and interpreter.
    m->link_method(m, CHECK);
  }
}

// Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
void InstanceKlass::initialize_super_interfaces(TRAPS) {
  assert (has_nonstatic_concrete_methods(), "caller should have checked this");
  for (int i = 0; i < local_interfaces()->length(); ++i) {
    InstanceKlass* ik = local_interfaces()->at(i);

    // Initialization is depth first search i.e. we start with top of the inheritance tree
    // has_nonstatic_concrete_methods drives searching superinterfaces since it
    // means has_nonstatic_concrete_methods in its superinterface hierarchy
    if (ik->has_nonstatic_concrete_methods()) {
      ik->initialize_super_interfaces(CHECK);
    }

    // Only initialize() interfaces that "declare" concrete methods.
    if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
      ik->initialize(CHECK);
    }
  }
}

using InitializationErrorTable = ResourceHashtable<const InstanceKlass*, OopHandle, 107, AnyObj::C_HEAP, mtClass>;
static InitializationErrorTable* _initialization_error_table;

void InstanceKlass::add_initialization_error(JavaThread* current, Handle exception) {
  // Create the same exception with a message indicating the thread name,
  // and the StackTraceElements.
  Handle init_error = java_lang_Throwable::create_initialization_error(current, exception);
  ResourceMark rm(current);
  if (init_error.is_null()) {
    log_trace(class, init)("Unable to create the desired initialization error for class %s", external_name());

    // We failed to create the new exception, most likely due to either out-of-memory or
    // a stackoverflow error. If the original exception was either of those then we save
    // the shared, pre-allocated, stackless, instance of that exception.
    if (exception->klass() == vmClasses::StackOverflowError_klass()) {
      log_debug(class, init)("Using shared StackOverflowError as initialization error for class %s", external_name());
      init_error = Handle(current, Universe::class_init_stack_overflow_error());
    } else if (exception->klass() == vmClasses::OutOfMemoryError_klass()) {
      log_debug(class, init)("Using shared OutOfMemoryError as initialization error for class %s", external_name());
      init_error = Handle(current, Universe::class_init_out_of_memory_error());
    } else {
      return;
    }
  }

  MutexLocker ml(current, ClassInitError_lock);
  OopHandle elem = OopHandle(Universe::vm_global(), init_error());
  bool created;
  if (_initialization_error_table == nullptr) {
    _initialization_error_table = new (mtClass) InitializationErrorTable();
  }
  _initialization_error_table->put_if_absent(this, elem, &created);
  assert(created, "Initialization is single threaded");
  log_trace(class, init)("Initialization error added for class %s", external_name());
}

oop InstanceKlass::get_initialization_error(JavaThread* current) {
  MutexLocker ml(current, ClassInitError_lock);
  if (_initialization_error_table == nullptr) {
    return nullptr;
  }
  OopHandle* h = _initialization_error_table->get(this);
  return (h != nullptr) ? h->resolve() : nullptr;
}

// Need to remove entries for unloaded classes.
void InstanceKlass::clean_initialization_error_table() {
  struct InitErrorTableCleaner {
    bool do_entry(const InstanceKlass* ik, OopHandle h) {
      if (!ik->is_loader_alive()) {
        h.release(Universe::vm_global());
        return true;
      } else {
        return false;
      }
    }
  };

  assert_locked_or_safepoint(ClassInitError_lock);
  InitErrorTableCleaner cleaner;
  if (_initialization_error_table != nullptr) {
    _initialization_error_table->unlink(&cleaner);
  }
}

void InstanceKlass::initialize_impl(TRAPS) {
  HandleMark hm(THREAD);

  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, -1);

  bool wait = false;

  JavaThread* jt = THREAD;

  bool debug_logging_enabled = log_is_enabled(Debug, class, init);

  // refer to the JVM book page 47 for description of steps
  // Step 1
  {
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, jt);

    // Step 2
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw.  This would wreak havoc.  See 6320309.
    while (is_being_initialized() && !is_reentrant_initialization(jt)) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" waiting for initialization of %s by thread \"%s\"",
                               jt->name(), external_name(), init_thread_name());
      }
      wait = true;
      jt->set_class_to_be_initialized(this);
      ol.wait_uninterruptibly(jt);
      jt->set_class_to_be_initialized(nullptr);
    }

    // Step 3
    if (is_being_initialized() && is_reentrant_initialization(jt)) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" recursively initializing %s",
                               jt->name(), external_name());
      }
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
      return;
    }

    // Step 4
    if (is_initialized()) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" found %s already initialized",
                               jt->name(), external_name());
      }
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
      return;
    }

    // Step 5
    if (is_in_error_state()) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" found %s is in error state",
                               jt->name(), external_name());
      }

      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
      ResourceMark rm(THREAD);
      Handle cause(THREAD, get_initialization_error(THREAD));

      stringStream ss;
      ss.print("Could not initialize class %s", external_name());
      if (cause.is_null()) {
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string());
      } else {
        THROW_MSG_CAUSE(vmSymbols::java_lang_NoClassDefFoundError(),
                        ss.as_string(), cause);
      }
    } else {

      // Step 6
      set_init_state(being_initialized);
      set_init_thread(jt);
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" is initializing %s",
                               jt->name(), external_name());
      }
    }
  }

  // Step 7
  // Next, if C is a class rather than an interface, initialize its super class and super
  // interfaces.
  if (!is_interface()) {
    Klass* super_klass = super();
    if (super_klass != nullptr && super_klass->should_be_initialized()) {
      super_klass->initialize(THREAD);
    }
    // If C implements any interface that declares a non-static, concrete method,
    // the initialization of C triggers initialization of its super interfaces.
    // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
    // having a superinterface that declares, non-static, concrete methods
    if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
      initialize_super_interfaces(THREAD);
    }

    // If any exceptions, complete abruptly, throwing the same exception as above.
    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        add_initialization_error(THREAD, e);
        // Locks object, set state, and notify all waiting threads
        set_initialization_state_and_notify(initialization_error, THREAD);
        CLEAR_PENDING_EXCEPTION;
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
      THROW_OOP(e());
    }
  }


  // Step 8
  {
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
    if (class_initializer() != nullptr) {
      // Timer includes any side effects of class initialization (resolution,
      // etc), but not recursive entry into call_class_initializer().
      PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                               ClassLoader::perf_class_init_selftime(),
                               ClassLoader::perf_classes_inited(),
                               jt->get_thread_stat()->perf_recursion_counts_addr(),
                               jt->get_thread_stat()->perf_timers_addr(),
                               PerfClassTraceTime::CLASS_CLINIT);
      call_class_initializer(THREAD);
    } else {
      // The elapsed time is so small it's not worth counting.
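      // Note: call_class_initializer() is still invoked below even when there is no
      // <clinit>, so its CDS enum-class setup and class-init logging still run.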
      if (UsePerfData) {
        ClassLoader::perf_classes_inited()->inc();
      }
      call_class_initializer(THREAD);
    }
  }

  // Step 9
  if (!HAS_PENDING_EXCEPTION) {
    set_initialization_state_and_notify(fully_initialized, CHECK);
    debug_only(vtable().verify(tty, true);)
  }
  else {
    // Step 10 and 11
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    // JVMTI has already reported the pending exception
    // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
    JvmtiExport::clear_detected_exception(jt);
    {
      EXCEPTION_MARK;
      add_initialization_error(THREAD, e);
      set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
      // JVMTI has already reported the pending exception
      // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
      JvmtiExport::clear_detected_exception(jt);
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
    if (e->is_a(vmClasses::Error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
}


void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  Handle h_init_lock(THREAD, init_lock());
  if (h_init_lock() != nullptr) {
    ObjectLocker ol(h_init_lock, THREAD);
    set_init_thread(nullptr); // reset _init_thread before changing _init_state
    set_init_state(state);
    fence_and_clear_init_lock();
    ol.notify_all(CHECK);
  } else {
    assert(h_init_lock() != nullptr, "The initialization state should never be set twice");
    set_init_thread(nullptr); // reset _init_thread before changing _init_state
    set_init_state(state);
  }
}

// Update hierarchy. This is done before the new klass has been added to the SystemDictionary. The Compile_lock
// is grabbed, to ensure that the compiler is not using the class hierarchy.
void InstanceKlass::add_to_hierarchy(JavaThread* current) {
  assert(!SafepointSynchronize::is_at_safepoint(), "must NOT be at safepoint");

  DeoptimizationScope deopt_scope;
  {
    MutexLocker ml(current, Compile_lock);

    set_init_state(InstanceKlass::loaded);
    // make sure init_state store is already done.
    // The compiler reads the hierarchy outside of the Compile_lock.
    // Access ordering is used to add to hierarchy.

    // Link into hierarchy.
    append_to_sibling_list();                    // add to superklass/sibling list
    process_interfaces();                        // handle all "implements" declarations

    // Now mark all code that depended on old class hierarchy.
    // Note: must be done *after* linking k into the hierarchy (was bug 12/9/97)
    if (Universe::is_fully_initialized()) {
      CodeCache::mark_dependents_on(&deopt_scope, this);
    }
  }
  // Perform the deopt handshake outside Compile_lock.
  deopt_scope.deoptimize_marked();
}


InstanceKlass* InstanceKlass::implementor() const {
  InstanceKlass* volatile* ik = adr_implementor();
  if (ik == nullptr) {
    return nullptr;
  } else {
    // This load races with inserts, and therefore needs acquire.
    InstanceKlass* ikls = Atomic::load_acquire(ik);
    if (ikls != nullptr && !ikls->is_loader_alive()) {
      return nullptr;  // don't return unloaded class
    } else {
      return ikls;
    }
  }
}


void InstanceKlass::set_implementor(InstanceKlass* ik) {
  assert_locked_or_safepoint(Compile_lock);
  assert(is_interface(), "not interface");
  InstanceKlass* volatile* addr = adr_implementor();
  assert(addr != nullptr, "null addr");
  if (addr != nullptr) {
    Atomic::release_store(addr, ik);
  }
}

int InstanceKlass::nof_implementors() const {
  InstanceKlass* ik = implementor();
  if (ik == nullptr) {
    return 0;
  } else if (ik != this) {
    return 1;
  } else {
    return 2;
  }
}

// The embedded _implementor field can only record one implementor.
// When there is more than one implementor, the _implementor field
// is set to the interface Klass* itself. Following are the possible
// values for the _implementor field:
//   null               - no implementor
//   implementor Klass* - one implementor
//   self               - more than one implementor
//
// The _implementor field only exists for interfaces.
void InstanceKlass::add_implementor(InstanceKlass* ik) {
  if (Universe::is_fully_initialized()) {
    assert_lock_strong(Compile_lock);
  }
  assert(is_interface(), "not interface");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (ik->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  InstanceKlass* super_ik = ik->java_super();
  if (super_ik != nullptr && super_ik->implements_interface(this))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  InstanceKlass* iklass = implementor();
  if (iklass == nullptr) {
    set_implementor(ik);
  } else if (iklass != this && iklass != ik) {
    // There is already an implementor. Use itself as an indicator of
    // more than one implementor.
    set_implementor(this);
  }

  // The implementor also implements the transitive_interfaces
  for (int index = 0; index < local_interfaces()->length(); index++) {
    local_interfaces()->at(index)->add_implementor(ik);
  }
}

void InstanceKlass::init_implementor() {
  if (is_interface()) {
    set_implementor(nullptr);
  }
}


void InstanceKlass::process_interfaces() {
  // link this class into the implementors list of every interface it implements
  for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
    assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
    InstanceKlass* interf = local_interfaces()->at(i);
    assert(interf->is_interface(), "expected interface");
    interf->add_implementor(this);
  }
}

bool InstanceKlass::can_be_primary_super_slow() const {
  if (is_interface())
    return false;
  else
    return Klass::can_be_primary_super_slow();
}

GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
                                                               Array<InstanceKlass*>* transitive_interfaces) {
  // The secondaries are the implemented interfaces.
  // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
  // (but it's safe to do here because we won't write into _secondary_supers from this point on).
  Array<Klass*>* interfaces = (Array<Klass*>*)(address)transitive_interfaces;
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    // Must share this for correct bootstrapping!
    set_secondary_supers(Universe::the_empty_klass_array(), Universe::the_empty_klass_bitmap());
    return nullptr;
  } else if (num_extra_slots == 0 && interfaces->length() <= 1) {
    // We will reuse the transitive interfaces list if we're certain
    // it's in hash order.
    uintx bitmap = compute_secondary_supers_bitmap(interfaces);
    set_secondary_supers(interfaces, bitmap);
    return nullptr;
  }
  // Copy transitive interfaces to a temporary growable array to be constructed
  // into the secondary super list with extra slots.
  GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
  for (int i = 0; i < interfaces->length(); i++) {
    secondaries->push(interfaces->at(i));
  }
  return secondaries;
}

bool InstanceKlass::implements_interface(Klass* k) const {
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < transitive_interfaces()->length(); i++) {
    if (transitive_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
  // Verify direct super interface
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < local_interfaces()->length(); i++) {
    if (local_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
  check_array_allocation_length(length, arrayOopDesc::max_array_length(T_OBJECT), CHECK_NULL);
  size_t size = objArrayOopDesc::object_size(length);
  ArrayKlass* ak = array_klass(n, CHECK_NULL);
  objArrayOop o = (objArrayOop)Universe::heap()->array_allocate(ak, size, length,
                                                                /* do_zero */ true, CHECK_NULL);
  return o;
}

instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" PTR_FORMAT ") as finalizable", p2i(i));
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oop as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh(THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  MANAGEMENT_ONLY(FinalizerService::on_register(h_i(), THREAD);)
  return h_i();
}

instanceOop InstanceKlass::allocate_instance(TRAPS) {
  assert(!is_abstract() && !is_interface(), "Should not create this object");
  size_t size = size_helper();  // Query before forming handle.
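  // obj_allocate() may safepoint (GC) or throw OutOfMemoryError; the Klass itself is
  // metaspace-allocated, so 'this' remains valid across the allocation.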
1484 return (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL); 1485 } 1486 1487 instanceOop InstanceKlass::allocate_instance(oop java_class, TRAPS) { 1488 Klass* k = java_lang_Class::as_Klass(java_class); 1489 if (k == nullptr) { 1490 ResourceMark rm(THREAD); 1491 THROW_(vmSymbols::java_lang_InstantiationException(), nullptr); 1492 } 1493 InstanceKlass* ik = cast(k); 1494 ik->check_valid_for_instantiation(false, CHECK_NULL); 1495 ik->initialize(CHECK_NULL); 1496 return ik->allocate_instance(THREAD); 1497 } 1498 1499 instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) { 1500 return instanceHandle(THREAD, allocate_instance(THREAD)); 1501 } 1502 1503 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) { 1504 if (is_interface() || is_abstract()) { 1505 ResourceMark rm(THREAD); 1506 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError() 1507 : vmSymbols::java_lang_InstantiationException(), external_name()); 1508 } 1509 if (this == vmClasses::Class_klass()) { 1510 ResourceMark rm(THREAD); 1511 THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError() 1512 : vmSymbols::java_lang_IllegalAccessException(), external_name()); 1513 } 1514 } 1515 1516 ArrayKlass* InstanceKlass::array_klass(int n, TRAPS) { 1517 // Need load-acquire for lock-free read 1518 if (array_klasses_acquire() == nullptr) { 1519 1520 // Recursively lock array allocation 1521 RecursiveLocker rl(MultiArray_lock, THREAD); 1522 1523 // Check if another thread created the array klass while we were waiting for the lock. 1524 if (array_klasses() == nullptr) { 1525 ObjArrayKlass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL); 1526 // use 'release' to pair with lock-free load 1527 release_set_array_klasses(k); 1528 } 1529 } 1530 1531 // array_klasses() will always be set at this point 1532 ObjArrayKlass* ak = array_klasses(); 1533 assert(ak != nullptr, "should be set"); 1534 return ak->array_klass(n, THREAD); 1535 } 1536 1537 ArrayKlass* InstanceKlass::array_klass_or_null(int n) { 1538 // Need load-acquire for lock-free read 1539 ObjArrayKlass* oak = array_klasses_acquire(); 1540 if (oak == nullptr) { 1541 return nullptr; 1542 } else { 1543 return oak->array_klass_or_null(n); 1544 } 1545 } 1546 1547 ArrayKlass* InstanceKlass::array_klass(TRAPS) { 1548 return array_klass(1, THREAD); 1549 } 1550 1551 ArrayKlass* InstanceKlass::array_klass_or_null() { 1552 return array_klass_or_null(1); 1553 } 1554 1555 static int call_class_initializer_counter = 0; // for debugging 1556 1557 Method* InstanceKlass::class_initializer() const { 1558 Method* clinit = find_method( 1559 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature()); 1560 if (clinit != nullptr && clinit->has_valid_initializer_flags()) { 1561 return clinit; 1562 } 1563 return nullptr; 1564 } 1565 1566 void InstanceKlass::call_class_initializer(TRAPS) { 1567 if (ReplayCompiles && 1568 (ReplaySuppressInitializers == 1 || 1569 (ReplaySuppressInitializers >= 2 && class_loader() != nullptr))) { 1570 // Hide the existence of the initializer for the purpose of replaying the compile 1571 return; 1572 } 1573 1574 #if INCLUDE_CDS 1575 // This is needed to ensure the consistency of the archived heap objects. 
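  // Enum classes whose constants were archived are initialized from the
  // archived heap objects (CDSEnumKlass::initialize_enum_klass) instead of by
  // running <clinit>, keeping the statics consistent with the archived oops.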
1576 if (has_archived_enum_objs()) { 1577 assert(is_shared(), "must be"); 1578 bool initialized = CDSEnumKlass::initialize_enum_klass(this, CHECK); 1579 if (initialized) { 1580 return; 1581 } 1582 } 1583 #endif 1584 1585 methodHandle h_method(THREAD, class_initializer()); 1586 assert(!is_initialized(), "we cannot initialize twice"); 1587 LogTarget(Info, class, init) lt; 1588 if (lt.is_enabled()) { 1589 ResourceMark rm(THREAD); 1590 LogStream ls(lt); 1591 ls.print("%d Initializing ", call_class_initializer_counter++); 1592 name()->print_value_on(&ls); 1593 ls.print_cr("%s (" PTR_FORMAT ") by thread \"%s\"", 1594 h_method() == nullptr ? "(no method)" : "", p2i(this), 1595 THREAD->name()); 1596 } 1597 if (h_method() != nullptr) { 1598 JavaCallArguments args; // No arguments 1599 JavaValue result(T_VOID); 1600 JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args) 1601 } 1602 } 1603 1604 1605 void InstanceKlass::mask_for(const methodHandle& method, int bci, 1606 InterpreterOopMap* entry_for) { 1607 // Lazily create the _oop_map_cache at first request. 1608 // Load_acquire is needed to safely get instance published with CAS by another thread. 1609 OopMapCache* oop_map_cache = Atomic::load_acquire(&_oop_map_cache); 1610 if (oop_map_cache == nullptr) { 1611 // Try to install new instance atomically. 1612 oop_map_cache = new OopMapCache(); 1613 OopMapCache* other = Atomic::cmpxchg(&_oop_map_cache, (OopMapCache*)nullptr, oop_map_cache); 1614 if (other != nullptr) { 1615 // Someone else managed to install before us, ditch local copy and use the existing one. 1616 delete oop_map_cache; 1617 oop_map_cache = other; 1618 } 1619 } 1620 // _oop_map_cache is constant after init; lookup below does its own locking. 1621 oop_map_cache->lookup(method, bci, entry_for); 1622 } 1623 1624 bool InstanceKlass::contains_field_offset(int offset) { 1625 fieldDescriptor fd; 1626 return find_field_from_offset(offset, false, &fd); 1627 } 1628 1629 FieldInfo InstanceKlass::field(int index) const { 1630 for (AllFieldStream fs(this); !fs.done(); fs.next()) { 1631 if (fs.index() == index) { 1632 return fs.to_FieldInfo(); 1633 } 1634 } 1635 fatal("Field not found"); 1636 return FieldInfo(); 1637 } 1638 1639 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const { 1640 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1641 Symbol* f_name = fs.name(); 1642 Symbol* f_sig = fs.signature(); 1643 if (f_name == name && f_sig == sig) { 1644 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index()); 1645 return true; 1646 } 1647 } 1648 return false; 1649 } 1650 1651 1652 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const { 1653 const int n = local_interfaces()->length(); 1654 for (int i = 0; i < n; i++) { 1655 Klass* intf1 = local_interfaces()->at(i); 1656 assert(intf1->is_interface(), "just checking type"); 1657 // search for field in current interface 1658 if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) { 1659 assert(fd->is_static(), "interface field must be static"); 1660 return intf1; 1661 } 1662 // search for field in direct superinterfaces 1663 Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd); 1664 if (intf2 != nullptr) return intf2; 1665 } 1666 // otherwise field lookup fails 1667 return nullptr; 1668 } 1669 1670 1671 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const { 1672 // search order according to newest JVM spec (5.4.3.2, p.167). 
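  // The first match wins; the klass or interface that declares the field is
  // returned and *fd describes the field within it.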
1673 // 1) search for field in current klass 1674 if (find_local_field(name, sig, fd)) { 1675 return const_cast<InstanceKlass*>(this); 1676 } 1677 // 2) search for field recursively in direct superinterfaces 1678 { Klass* intf = find_interface_field(name, sig, fd); 1679 if (intf != nullptr) return intf; 1680 } 1681 // 3) apply field lookup recursively if superclass exists 1682 { Klass* supr = super(); 1683 if (supr != nullptr) return InstanceKlass::cast(supr)->find_field(name, sig, fd); 1684 } 1685 // 4) otherwise field lookup fails 1686 return nullptr; 1687 } 1688 1689 1690 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const { 1691 // search order according to newest JVM spec (5.4.3.2, p.167). 1692 // 1) search for field in current klass 1693 if (find_local_field(name, sig, fd)) { 1694 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this); 1695 } 1696 // 2) search for field recursively in direct superinterfaces 1697 if (is_static) { 1698 Klass* intf = find_interface_field(name, sig, fd); 1699 if (intf != nullptr) return intf; 1700 } 1701 // 3) apply field lookup recursively if superclass exists 1702 { Klass* supr = super(); 1703 if (supr != nullptr) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd); 1704 } 1705 // 4) otherwise field lookup fails 1706 return nullptr; 1707 } 1708 1709 1710 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const { 1711 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1712 if (fs.offset() == offset) { 1713 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index()); 1714 if (fd->is_static() == is_static) return true; 1715 } 1716 } 1717 return false; 1718 } 1719 1720 1721 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const { 1722 Klass* klass = const_cast<InstanceKlass*>(this); 1723 while (klass != nullptr) { 1724 if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) { 1725 return true; 1726 } 1727 klass = klass->super(); 1728 } 1729 return false; 1730 } 1731 1732 1733 void InstanceKlass::methods_do(void f(Method* method)) { 1734 // Methods aren't stable until they are loaded. 
This can be read outside 1735 // a lock through the ClassLoaderData for profiling 1736 // Redefined scratch classes are on the list and need to be cleaned 1737 if (!is_loaded() && !is_scratch_class()) { 1738 return; 1739 } 1740 1741 int len = methods()->length(); 1742 for (int index = 0; index < len; index++) { 1743 Method* m = methods()->at(index); 1744 assert(m->is_method(), "must be method"); 1745 f(m); 1746 } 1747 } 1748 1749 1750 void InstanceKlass::do_local_static_fields(FieldClosure* cl) { 1751 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1752 if (fs.access_flags().is_static()) { 1753 fieldDescriptor& fd = fs.field_descriptor(); 1754 cl->do_field(&fd); 1755 } 1756 } 1757 } 1758 1759 1760 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) { 1761 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1762 if (fs.access_flags().is_static()) { 1763 fieldDescriptor& fd = fs.field_descriptor(); 1764 f(&fd, mirror, CHECK); 1765 } 1766 } 1767 } 1768 1769 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) { 1770 InstanceKlass* super = superklass(); 1771 if (super != nullptr) { 1772 super->do_nonstatic_fields(cl); 1773 } 1774 fieldDescriptor fd; 1775 int length = java_fields_count(); 1776 for (int i = 0; i < length; i += 1) { 1777 fd.reinitialize(this, i); 1778 if (!fd.is_static()) { 1779 cl->do_field(&fd); 1780 } 1781 } 1782 } 1783 1784 // first in Pair is offset, second is index. 1785 static int compare_fields_by_offset(Pair<int,int>* a, Pair<int,int>* b) { 1786 return a->first - b->first; 1787 } 1788 1789 void InstanceKlass::print_nonstatic_fields(FieldClosure* cl) { 1790 InstanceKlass* super = superklass(); 1791 if (super != nullptr) { 1792 super->print_nonstatic_fields(cl); 1793 } 1794 ResourceMark rm; 1795 fieldDescriptor fd; 1796 // In DebugInfo nonstatic fields are sorted by offset. 
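  // Collect (offset, field index) pairs for the fields declared in this klass,
  // sort them by offset, and apply the closure in offset order (superclass
  // fields were already handled by the recursive call above).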
1797 GrowableArray<Pair<int,int> > fields_sorted; 1798 int i = 0; 1799 for (AllFieldStream fs(this); !fs.done(); fs.next()) { 1800 if (!fs.access_flags().is_static()) { 1801 fd = fs.field_descriptor(); 1802 Pair<int,int> f(fs.offset(), fs.index()); 1803 fields_sorted.push(f); 1804 i++; 1805 } 1806 } 1807 if (i > 0) { 1808 int length = i; 1809 assert(length == fields_sorted.length(), "duh"); 1810 fields_sorted.sort(compare_fields_by_offset); 1811 for (int i = 0; i < length; i++) { 1812 fd.reinitialize(this, fields_sorted.at(i).second); 1813 assert(!fd.is_static() && fd.offset() == fields_sorted.at(i).first, "only nonstatic fields"); 1814 cl->do_field(&fd); 1815 } 1816 } 1817 } 1818 1819 #ifdef ASSERT 1820 static int linear_search(const Array<Method*>* methods, 1821 const Symbol* name, 1822 const Symbol* signature) { 1823 const int len = methods->length(); 1824 for (int index = 0; index < len; index++) { 1825 const Method* const m = methods->at(index); 1826 assert(m->is_method(), "must be method"); 1827 if (m->signature() == signature && m->name() == name) { 1828 return index; 1829 } 1830 } 1831 return -1; 1832 } 1833 #endif 1834 1835 bool InstanceKlass::_disable_method_binary_search = false; 1836 1837 NOINLINE int linear_search(const Array<Method*>* methods, const Symbol* name) { 1838 int len = methods->length(); 1839 int l = 0; 1840 int h = len - 1; 1841 while (l <= h) { 1842 Method* m = methods->at(l); 1843 if (m->name() == name) { 1844 return l; 1845 } 1846 l++; 1847 } 1848 return -1; 1849 } 1850 1851 inline int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) { 1852 if (_disable_method_binary_search) { 1853 assert(CDSConfig::is_dumping_dynamic_archive(), "must be"); 1854 // At the final stage of dynamic dumping, the methods array may not be sorted 1855 // by ascending addresses of their names, so we can't use binary search anymore. 1856 // However, methods with the same name are still laid out consecutively inside the 1857 // methods array, so let's look for the first one that matches. 
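    // The linear scan is O(length), but this path is only taken while dumping
    // a dynamic archive (asserted above), so the extra cost is acceptable.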
1858 return linear_search(methods, name); 1859 } 1860 1861 int len = methods->length(); 1862 int l = 0; 1863 int h = len - 1; 1864 1865 // methods are sorted by ascending addresses of their names, so do binary search 1866 while (l <= h) { 1867 int mid = (l + h) >> 1; 1868 Method* m = methods->at(mid); 1869 assert(m->is_method(), "must be method"); 1870 int res = m->name()->fast_compare(name); 1871 if (res == 0) { 1872 return mid; 1873 } else if (res < 0) { 1874 l = mid + 1; 1875 } else { 1876 h = mid - 1; 1877 } 1878 } 1879 return -1; 1880 } 1881 1882 // find_method looks up the name/signature in the local methods array 1883 Method* InstanceKlass::find_method(const Symbol* name, 1884 const Symbol* signature) const { 1885 return find_method_impl(name, signature, 1886 OverpassLookupMode::find, 1887 StaticLookupMode::find, 1888 PrivateLookupMode::find); 1889 } 1890 1891 Method* InstanceKlass::find_method_impl(const Symbol* name, 1892 const Symbol* signature, 1893 OverpassLookupMode overpass_mode, 1894 StaticLookupMode static_mode, 1895 PrivateLookupMode private_mode) const { 1896 return InstanceKlass::find_method_impl(methods(), 1897 name, 1898 signature, 1899 overpass_mode, 1900 static_mode, 1901 private_mode); 1902 } 1903 1904 // find_instance_method looks up the name/signature in the local methods array 1905 // and skips over static methods 1906 Method* InstanceKlass::find_instance_method(const Array<Method*>* methods, 1907 const Symbol* name, 1908 const Symbol* signature, 1909 PrivateLookupMode private_mode) { 1910 Method* const meth = InstanceKlass::find_method_impl(methods, 1911 name, 1912 signature, 1913 OverpassLookupMode::find, 1914 StaticLookupMode::skip, 1915 private_mode); 1916 assert(((meth == nullptr) || !meth->is_static()), 1917 "find_instance_method should have skipped statics"); 1918 return meth; 1919 } 1920 1921 // find_instance_method looks up the name/signature in the local methods array 1922 // and skips over static methods 1923 Method* InstanceKlass::find_instance_method(const Symbol* name, 1924 const Symbol* signature, 1925 PrivateLookupMode private_mode) const { 1926 return InstanceKlass::find_instance_method(methods(), name, signature, private_mode); 1927 } 1928 1929 // Find looks up the name/signature in the local methods array 1930 // and filters on the overpass, static and private flags 1931 // This returns the first one found 1932 // note that the local methods array can have up to one overpass, one static 1933 // and one instance (private or not) with the same name/signature 1934 Method* InstanceKlass::find_local_method(const Symbol* name, 1935 const Symbol* signature, 1936 OverpassLookupMode overpass_mode, 1937 StaticLookupMode static_mode, 1938 PrivateLookupMode private_mode) const { 1939 return InstanceKlass::find_method_impl(methods(), 1940 name, 1941 signature, 1942 overpass_mode, 1943 static_mode, 1944 private_mode); 1945 } 1946 1947 // Find looks up the name/signature in the local methods array 1948 // and filters on the overpass, static and private flags 1949 // This returns the first one found 1950 // note that the local methods array can have up to one overpass, one static 1951 // and one instance (private or not) with the same name/signature 1952 Method* InstanceKlass::find_local_method(const Array<Method*>* methods, 1953 const Symbol* name, 1954 const Symbol* signature, 1955 OverpassLookupMode overpass_mode, 1956 StaticLookupMode static_mode, 1957 PrivateLookupMode private_mode) { 1958 return InstanceKlass::find_method_impl(methods, 1959 name, 1960 
signature, 1961 overpass_mode, 1962 static_mode, 1963 private_mode); 1964 } 1965 1966 Method* InstanceKlass::find_method(const Array<Method*>* methods, 1967 const Symbol* name, 1968 const Symbol* signature) { 1969 return InstanceKlass::find_method_impl(methods, 1970 name, 1971 signature, 1972 OverpassLookupMode::find, 1973 StaticLookupMode::find, 1974 PrivateLookupMode::find); 1975 } 1976 1977 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods, 1978 const Symbol* name, 1979 const Symbol* signature, 1980 OverpassLookupMode overpass_mode, 1981 StaticLookupMode static_mode, 1982 PrivateLookupMode private_mode) { 1983 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode); 1984 return hit >= 0 ? methods->at(hit): nullptr; 1985 } 1986 1987 // true if method matches signature and conforms to skipping_X conditions. 1988 static bool method_matches(const Method* m, 1989 const Symbol* signature, 1990 bool skipping_overpass, 1991 bool skipping_static, 1992 bool skipping_private) { 1993 return ((m->signature() == signature) && 1994 (!skipping_overpass || !m->is_overpass()) && 1995 (!skipping_static || !m->is_static()) && 1996 (!skipping_private || !m->is_private())); 1997 } 1998 1999 // Used directly for default_methods to find the index into the 2000 // default_vtable_indices, and indirectly by find_method 2001 // find_method_index looks in the local methods array to return the index 2002 // of the matching name/signature. If, overpass methods are being ignored, 2003 // the search continues to find a potential non-overpass match. This capability 2004 // is important during method resolution to prefer a static method, for example, 2005 // over an overpass method. 2006 // There is the possibility in any _method's array to have the same name/signature 2007 // for a static method, an overpass method and a local instance method 2008 // To correctly catch a given method, the search criteria may need 2009 // to explicitly skip the other two. For local instance methods, it 2010 // is often necessary to skip private methods 2011 int InstanceKlass::find_method_index(const Array<Method*>* methods, 2012 const Symbol* name, 2013 const Symbol* signature, 2014 OverpassLookupMode overpass_mode, 2015 StaticLookupMode static_mode, 2016 PrivateLookupMode private_mode) { 2017 const bool skipping_overpass = (overpass_mode == OverpassLookupMode::skip); 2018 const bool skipping_static = (static_mode == StaticLookupMode::skip); 2019 const bool skipping_private = (private_mode == PrivateLookupMode::skip); 2020 const int hit = quick_search(methods, name); 2021 if (hit != -1) { 2022 const Method* const m = methods->at(hit); 2023 2024 // Do linear search to find matching signature. First, quick check 2025 // for common case, ignoring overpasses if requested. 
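    // quick_search() found some method with the requested name; methods
    // sharing a name are adjacent in the array, so if this hit doesn't match
    // we scan its neighbors in both directions below.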
2026 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 2027 return hit; 2028 } 2029 2030 // search downwards through overloaded methods 2031 int i; 2032 for (i = hit - 1; i >= 0; --i) { 2033 const Method* const m = methods->at(i); 2034 assert(m->is_method(), "must be method"); 2035 if (m->name() != name) { 2036 break; 2037 } 2038 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 2039 return i; 2040 } 2041 } 2042 // search upwards 2043 for (i = hit + 1; i < methods->length(); ++i) { 2044 const Method* const m = methods->at(i); 2045 assert(m->is_method(), "must be method"); 2046 if (m->name() != name) { 2047 break; 2048 } 2049 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 2050 return i; 2051 } 2052 } 2053 // not found 2054 #ifdef ASSERT 2055 const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 : 2056 linear_search(methods, name, signature); 2057 assert(-1 == index, "binary search should have found entry %d", index); 2058 #endif 2059 } 2060 return -1; 2061 } 2062 2063 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const { 2064 return find_method_by_name(methods(), name, end); 2065 } 2066 2067 int InstanceKlass::find_method_by_name(const Array<Method*>* methods, 2068 const Symbol* name, 2069 int* end_ptr) { 2070 assert(end_ptr != nullptr, "just checking"); 2071 int start = quick_search(methods, name); 2072 int end = start + 1; 2073 if (start != -1) { 2074 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start; 2075 while (end < methods->length() && (methods->at(end))->name() == name) ++end; 2076 *end_ptr = end; 2077 return start; 2078 } 2079 return -1; 2080 } 2081 2082 // uncached_lookup_method searches both the local class methods array and all 2083 // superclasses methods arrays, skipping any overpass methods in superclasses, 2084 // and possibly skipping private methods. 
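// Note that overpass methods are only accepted in the klass where the lookup
// starts; once the walk moves to a superclass the lookup mode is forced to
// skip them (see the end of the loop below).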
2085 Method* InstanceKlass::uncached_lookup_method(const Symbol* name, 2086 const Symbol* signature, 2087 OverpassLookupMode overpass_mode, 2088 PrivateLookupMode private_mode) const { 2089 OverpassLookupMode overpass_local_mode = overpass_mode; 2090 const Klass* klass = this; 2091 while (klass != nullptr) { 2092 Method* const method = InstanceKlass::cast(klass)->find_method_impl(name, 2093 signature, 2094 overpass_local_mode, 2095 StaticLookupMode::find, 2096 private_mode); 2097 if (method != nullptr) { 2098 return method; 2099 } 2100 klass = klass->super(); 2101 overpass_local_mode = OverpassLookupMode::skip; // Always ignore overpass methods in superclasses 2102 } 2103 return nullptr; 2104 } 2105 2106 #ifdef ASSERT 2107 // search through class hierarchy and return true if this class or 2108 // one of the superclasses was redefined 2109 bool InstanceKlass::has_redefined_this_or_super() const { 2110 const Klass* klass = this; 2111 while (klass != nullptr) { 2112 if (InstanceKlass::cast(klass)->has_been_redefined()) { 2113 return true; 2114 } 2115 klass = klass->super(); 2116 } 2117 return false; 2118 } 2119 #endif 2120 2121 // lookup a method in the default methods list then in all transitive interfaces 2122 // Do NOT return private or static methods 2123 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name, 2124 Symbol* signature) const { 2125 Method* m = nullptr; 2126 if (default_methods() != nullptr) { 2127 m = find_method(default_methods(), name, signature); 2128 } 2129 // Look up interfaces 2130 if (m == nullptr) { 2131 m = lookup_method_in_all_interfaces(name, signature, DefaultsLookupMode::find); 2132 } 2133 return m; 2134 } 2135 2136 // lookup a method in all the interfaces that this class implements 2137 // Do NOT return private or static methods, new in JDK8 which are not externally visible 2138 // They should only be found in the initial InterfaceMethodRef 2139 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name, 2140 Symbol* signature, 2141 DefaultsLookupMode defaults_mode) const { 2142 Array<InstanceKlass*>* all_ifs = transitive_interfaces(); 2143 int num_ifs = all_ifs->length(); 2144 InstanceKlass *ik = nullptr; 2145 for (int i = 0; i < num_ifs; i++) { 2146 ik = all_ifs->at(i); 2147 Method* m = ik->lookup_method(name, signature); 2148 if (m != nullptr && m->is_public() && !m->is_static() && 2149 ((defaults_mode != DefaultsLookupMode::skip) || !m->is_default_method())) { 2150 return m; 2151 } 2152 } 2153 return nullptr; 2154 } 2155 2156 PrintClassClosure::PrintClassClosure(outputStream* st, bool verbose) 2157 :_st(st), _verbose(verbose) { 2158 ResourceMark rm; 2159 _st->print("%-18s ", "KlassAddr"); 2160 _st->print("%-4s ", "Size"); 2161 _st->print("%-20s ", "State"); 2162 _st->print("%-7s ", "Flags"); 2163 _st->print("%-5s ", "ClassName"); 2164 _st->cr(); 2165 } 2166 2167 void PrintClassClosure::do_klass(Klass* k) { 2168 ResourceMark rm; 2169 // klass pointer 2170 _st->print(PTR_FORMAT " ", p2i(k)); 2171 // klass size 2172 _st->print("%4d ", k->size()); 2173 // initialization state 2174 if (k->is_instance_klass()) { 2175 _st->print("%-20s ",InstanceKlass::cast(k)->init_state_name()); 2176 } else { 2177 _st->print("%-20s ",""); 2178 } 2179 // misc flags(Changes should synced with ClassesDCmd::ClassesDCmd help doc) 2180 char buf[10]; 2181 int i = 0; 2182 if (k->has_finalizer()) buf[i++] = 'F'; 2183 if (k->is_instance_klass()) { 2184 InstanceKlass* ik = InstanceKlass::cast(k); 2185 if (ik->has_final_method()) buf[i++] = 'f'; 2186 if 
(ik->is_rewritten()) buf[i++] = 'W'; 2187 if (ik->is_contended()) buf[i++] = 'C'; 2188 if (ik->has_been_redefined()) buf[i++] = 'R'; 2189 if (ik->is_shared()) buf[i++] = 'S'; 2190 } 2191 buf[i++] = '\0'; 2192 _st->print("%-7s ", buf); 2193 // klass name 2194 _st->print("%-5s ", k->external_name()); 2195 // end 2196 _st->cr(); 2197 if (_verbose) { 2198 k->print_on(_st); 2199 } 2200 } 2201 2202 /* jni_id_for for jfieldIds only */ 2203 JNIid* InstanceKlass::jni_id_for(int offset) { 2204 MutexLocker ml(JfieldIdCreation_lock); 2205 JNIid* probe = jni_ids() == nullptr ? nullptr : jni_ids()->find(offset); 2206 if (probe == nullptr) { 2207 // Allocate new static field identifier 2208 probe = new JNIid(this, offset, jni_ids()); 2209 set_jni_ids(probe); 2210 } 2211 return probe; 2212 } 2213 2214 u2 InstanceKlass::enclosing_method_data(int offset) const { 2215 const Array<jushort>* const inner_class_list = inner_classes(); 2216 if (inner_class_list == nullptr) { 2217 return 0; 2218 } 2219 const int length = inner_class_list->length(); 2220 if (length % inner_class_next_offset == 0) { 2221 return 0; 2222 } 2223 const int index = length - enclosing_method_attribute_size; 2224 assert(offset < enclosing_method_attribute_size, "invalid offset"); 2225 return inner_class_list->at(index + offset); 2226 } 2227 2228 void InstanceKlass::set_enclosing_method_indices(u2 class_index, 2229 u2 method_index) { 2230 Array<jushort>* inner_class_list = inner_classes(); 2231 assert (inner_class_list != nullptr, "_inner_classes list is not set up"); 2232 int length = inner_class_list->length(); 2233 if (length % inner_class_next_offset == enclosing_method_attribute_size) { 2234 int index = length - enclosing_method_attribute_size; 2235 inner_class_list->at_put( 2236 index + enclosing_method_class_index_offset, class_index); 2237 inner_class_list->at_put( 2238 index + enclosing_method_method_index_offset, method_index); 2239 } 2240 } 2241 2242 jmethodID InstanceKlass::update_jmethod_id(jmethodID* jmeths, Method* method, int idnum) { 2243 if (method->is_old() && !method->is_obsolete()) { 2244 // If the method passed in is old (but not obsolete), use the current version. 2245 method = method_with_idnum((int)idnum); 2246 assert(method != nullptr, "old and but not obsolete, so should exist"); 2247 } 2248 jmethodID new_id = Method::make_jmethod_id(class_loader_data(), method); 2249 Atomic::release_store(&jmeths[idnum + 1], new_id); 2250 return new_id; 2251 } 2252 2253 // Lookup or create a jmethodID. 2254 // This code is called by the VMThread and JavaThreads so the 2255 // locking has to be done very carefully to avoid deadlocks 2256 // and/or other cache consistency problems. 2257 // 2258 jmethodID InstanceKlass::get_jmethod_id(const methodHandle& method_h) { 2259 Method* method = method_h(); 2260 int idnum = method->method_idnum(); 2261 jmethodID* jmeths = methods_jmethod_ids_acquire(); 2262 2263 // We use a double-check locking idiom here because this cache is 2264 // performance sensitive. In the normal system, this cache only 2265 // transitions from null to non-null which is safe because we use 2266 // release_set_methods_jmethod_ids() to advertise the new cache. 2267 // A partially constructed cache should never be seen by a racing 2268 // thread. We also use release_store() to save a new jmethodID 2269 // in the cache so a partially constructed jmethodID should never be 2270 // seen either. 
Cache reads of existing jmethodIDs proceed without a 2271 // lock, but cache writes of a new jmethodID requires uniqueness and 2272 // creation of the cache itself requires no leaks so a lock is 2273 // acquired in those two cases. 2274 // 2275 // If the RedefineClasses() API has been used, then this cache grows 2276 // in the redefinition safepoint. 2277 2278 if (jmeths == nullptr) { 2279 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag); 2280 jmeths = methods_jmethod_ids_acquire(); 2281 // Still null? 2282 if (jmeths == nullptr) { 2283 size_t size = idnum_allocated_count(); 2284 assert(size > (size_t)idnum, "should already have space"); 2285 jmeths = NEW_C_HEAP_ARRAY(jmethodID, size + 1, mtClass); 2286 memset(jmeths, 0, (size + 1) * sizeof(jmethodID)); 2287 // cache size is stored in element[0], other elements offset by one 2288 jmeths[0] = (jmethodID)size; 2289 jmethodID new_id = update_jmethod_id(jmeths, method, idnum); 2290 2291 // publish jmeths 2292 release_set_methods_jmethod_ids(jmeths); 2293 return new_id; 2294 } 2295 } 2296 2297 jmethodID id = Atomic::load_acquire(&jmeths[idnum + 1]); 2298 if (id == nullptr) { 2299 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag); 2300 id = jmeths[idnum + 1]; 2301 // Still null? 2302 if (id == nullptr) { 2303 return update_jmethod_id(jmeths, method, idnum); 2304 } 2305 } 2306 return id; 2307 } 2308 2309 void InstanceKlass::update_methods_jmethod_cache() { 2310 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint"); 2311 jmethodID* cache = _methods_jmethod_ids; 2312 if (cache != nullptr) { 2313 size_t size = idnum_allocated_count(); 2314 size_t old_size = (size_t)cache[0]; 2315 if (old_size < size + 1) { 2316 // Allocate a larger one and copy entries to the new one. 2317 // They've already been updated to point to new methods where applicable (i.e., not obsolete). 2318 jmethodID* new_cache = NEW_C_HEAP_ARRAY(jmethodID, size + 1, mtClass); 2319 memset(new_cache, 0, (size + 1) * sizeof(jmethodID)); 2320 // The cache size is stored in element[0]; the other elements are offset by one. 2321 new_cache[0] = (jmethodID)size; 2322 2323 for (int i = 1; i <= (int)old_size; i++) { 2324 new_cache[i] = cache[i]; 2325 } 2326 _methods_jmethod_ids = new_cache; 2327 FREE_C_HEAP_ARRAY(jmethodID, cache); 2328 } 2329 } 2330 } 2331 2332 // Figure out how many jmethodIDs haven't been allocated, and make 2333 // sure space for them is pre-allocated. This makes getting all 2334 // method ids much, much faster with classes with more than 8 2335 // methods, and has a *substantial* effect on performance with jvmti 2336 // code that loads all jmethodIDs for all classes. 2337 void InstanceKlass::ensure_space_for_methodids(int start_offset) { 2338 int new_jmeths = 0; 2339 int length = methods()->length(); 2340 for (int index = start_offset; index < length; index++) { 2341 Method* m = methods()->at(index); 2342 jmethodID id = m->find_jmethod_id_or_null(); 2343 if (id == nullptr) { 2344 new_jmeths++; 2345 } 2346 } 2347 if (new_jmeths != 0) { 2348 Method::ensure_jmethod_ids(class_loader_data(), new_jmeths); 2349 } 2350 } 2351 2352 // Lookup a jmethodID, null if not found. Do no blocking, no allocations, no handles 2353 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) { 2354 int idnum = method->method_idnum(); 2355 jmethodID* jmeths = methods_jmethod_ids_acquire(); 2356 return (jmeths != nullptr) ? 
jmeths[idnum + 1] : nullptr; 2357 } 2358 2359 inline DependencyContext InstanceKlass::dependencies() { 2360 DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned); 2361 return dep_context; 2362 } 2363 2364 void InstanceKlass::mark_dependent_nmethods(DeoptimizationScope* deopt_scope, KlassDepChange& changes) { 2365 dependencies().mark_dependent_nmethods(deopt_scope, changes); 2366 } 2367 2368 void InstanceKlass::add_dependent_nmethod(nmethod* nm) { 2369 dependencies().add_dependent_nmethod(nm); 2370 } 2371 2372 void InstanceKlass::clean_dependency_context() { 2373 dependencies().clean_unloading_dependents(); 2374 } 2375 2376 #ifndef PRODUCT 2377 void InstanceKlass::print_dependent_nmethods(bool verbose) { 2378 dependencies().print_dependent_nmethods(verbose); 2379 } 2380 2381 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) { 2382 return dependencies().is_dependent_nmethod(nm); 2383 } 2384 #endif //PRODUCT 2385 2386 void InstanceKlass::clean_weak_instanceklass_links() { 2387 clean_implementors_list(); 2388 clean_method_data(); 2389 } 2390 2391 void InstanceKlass::clean_implementors_list() { 2392 assert(is_loader_alive(), "this klass should be live"); 2393 if (is_interface()) { 2394 assert (ClassUnloading, "only called for ClassUnloading"); 2395 for (;;) { 2396 // Use load_acquire due to competing with inserts 2397 InstanceKlass* volatile* iklass = adr_implementor(); 2398 assert(iklass != nullptr, "Klass must not be null"); 2399 InstanceKlass* impl = Atomic::load_acquire(iklass); 2400 if (impl != nullptr && !impl->is_loader_alive()) { 2401 // null this field, might be an unloaded instance klass or null 2402 if (Atomic::cmpxchg(iklass, impl, (InstanceKlass*)nullptr) == impl) { 2403 // Successfully unlinking implementor. 2404 if (log_is_enabled(Trace, class, unload)) { 2405 ResourceMark rm; 2406 log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name()); 2407 } 2408 return; 2409 } 2410 } else { 2411 return; 2412 } 2413 } 2414 } 2415 } 2416 2417 void InstanceKlass::clean_method_data() { 2418 for (int m = 0; m < methods()->length(); m++) { 2419 MethodData* mdo = methods()->at(m)->method_data(); 2420 if (mdo != nullptr) { 2421 mdo->clean_method_data(/*always_clean*/false); 2422 } 2423 } 2424 } 2425 2426 void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) { 2427 Klass::metaspace_pointers_do(it); 2428 2429 if (log_is_enabled(Trace, cds)) { 2430 ResourceMark rm; 2431 log_trace(cds)("Iter(InstanceKlass): %p (%s)", this, external_name()); 2432 } 2433 2434 it->push(&_annotations); 2435 it->push((Klass**)&_array_klasses); 2436 if (!is_rewritten()) { 2437 it->push(&_constants, MetaspaceClosure::_writable); 2438 } else { 2439 it->push(&_constants); 2440 } 2441 it->push(&_inner_classes); 2442 #if INCLUDE_JVMTI 2443 it->push(&_previous_versions); 2444 #endif 2445 #if INCLUDE_CDS 2446 // For "old" classes with methods containing the jsr bytecode, the _methods array will 2447 // be rewritten during runtime (see Rewriter::rewrite_jsrs()) but they cannot be safely 2448 // checked here with ByteCodeStream. All methods that can't be verified are made writable. 2449 // The length check on the _methods is necessary because classes which don't have any 2450 // methods share the Universe::_the_empty_method_array which is in the RO region. 
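  // Pushing with MetaspaceClosure::_writable keeps the pointed-to array
  // writable in the archive so it can still be patched at runtime.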
2451   if (_methods != nullptr && _methods->length() > 0 && !can_be_verified_at_dumptime()) {
2452     // To handle jsr bytecode, a new Method* may be stored into _methods
2453     it->push(&_methods, MetaspaceClosure::_writable);
2454   } else {
2455 #endif
2456     it->push(&_methods);
2457 #if INCLUDE_CDS
2458   }
2459 #endif
2460   it->push(&_default_methods);
2461   it->push(&_local_interfaces);
2462   it->push(&_transitive_interfaces);
2463   it->push(&_method_ordering);
2464   if (!is_rewritten()) {
2465     it->push(&_default_vtable_indices, MetaspaceClosure::_writable);
2466   } else {
2467     it->push(&_default_vtable_indices);
2468   }
2469 
2470   it->push(&_fieldinfo_stream);
2471   // _fields_status might be written into by Rewriter::scan_method() -> fd.set_has_initialized_final_update()
2472   it->push(&_fields_status, MetaspaceClosure::_writable);
2473 
2474   if (itable_length() > 0) {
2475     itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2476     int method_table_offset_in_words = ioe->offset()/wordSize;
2477     int itable_offset_in_words = (int)(start_of_itable() - (intptr_t*)this);
2478 
2479     int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words)
2480                          / itableOffsetEntry::size();
2481 
2482     for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
2483       if (ioe->interface_klass() != nullptr) {
2484         it->push(ioe->interface_klass_addr());
2485         itableMethodEntry* ime = ioe->first_method_entry(this);
2486         int n = klassItable::method_count_for_interface(ioe->interface_klass());
2487         for (int index = 0; index < n; index ++) {
2488           it->push(ime[index].method_addr());
2489         }
2490       }
2491     }
2492   }
2493 
2494   it->push(&_nest_members);
2495   it->push(&_permitted_subclasses);
2496   it->push(&_record_components);
2497 }
2498 
2499 #if INCLUDE_CDS
2500 void InstanceKlass::remove_unshareable_info() {
2501 
2502   if (is_linked()) {
2503     assert(can_be_verified_at_dumptime(), "must be");
2504     // Remember this so we can avoid walking the hierarchy at runtime.
2505     set_verified_at_dump_time();
2506   }
2507 
2508   Klass::remove_unshareable_info();
2509 
2510   if (SystemDictionaryShared::has_class_failed_verification(this)) {
2511     // Linking of classes is attempted during dumping and may fail,
2512     // but these classes are still in the dictionary and class list in CLD.
2513     // If the class has failed verification, there is nothing else to remove.
2514     return;
2515   }
2516 
2517   // Reset to the 'allocated' state to prevent any premature access to
2518   // a shared class at runtime while the class is still being loaded and
2519   // restored. A class' init_state is set to 'loaded' at runtime when it's
2520   // being added to the class hierarchy (see InstanceKlass::add_to_hierarchy()).
2521   _init_state = allocated;
2522 
2523   { // Otherwise this needs to take out the Compile_lock.
2524     assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2525     init_implementor();
2526   }
2527 
2528   // Call remove_unshareable_info() on other objects that belong to this class, except
2529   // for constants()->remove_unshareable_info(), which is called in a separate pass in
2530   // ArchiveBuilder::make_klasses_shareable().
2531 
2532   for (int i = 0; i < methods()->length(); i++) {
2533     Method* m = methods()->at(i);
2534     m->remove_unshareable_info();
2535   }
2536 
2537   // do array classes also.
2538   if (array_klasses() != nullptr) {
2539     array_klasses()->remove_unshareable_info();
2540   }
2541 
2542   // These are not allocated from metaspace. They are safe to set to null.
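  // They are C-heap or per-run runtime state and are recreated on demand
  // after the class has been restored from the archive.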
2543 _source_debug_extension = nullptr; 2544 _dep_context = nullptr; 2545 _osr_nmethods_head = nullptr; 2546 #if INCLUDE_JVMTI 2547 _breakpoints = nullptr; 2548 _previous_versions = nullptr; 2549 _cached_class_file = nullptr; 2550 _jvmti_cached_class_field_map = nullptr; 2551 #endif 2552 2553 _init_thread = nullptr; 2554 _methods_jmethod_ids = nullptr; 2555 _jni_ids = nullptr; 2556 _oop_map_cache = nullptr; 2557 // clear _nest_host to ensure re-load at runtime 2558 _nest_host = nullptr; 2559 init_shared_package_entry(); 2560 _dep_context_last_cleaned = 0; 2561 2562 remove_unshareable_flags(); 2563 } 2564 2565 void InstanceKlass::remove_unshareable_flags() { 2566 // clear all the flags/stats that shouldn't be in the archived version 2567 assert(!is_scratch_class(), "must be"); 2568 assert(!has_been_redefined(), "must be"); 2569 #if INCLUDE_JVMTI 2570 set_is_being_redefined(false); 2571 #endif 2572 set_has_resolved_methods(false); 2573 } 2574 2575 void InstanceKlass::remove_java_mirror() { 2576 Klass::remove_java_mirror(); 2577 2578 // do array classes also. 2579 if (array_klasses() != nullptr) { 2580 array_klasses()->remove_java_mirror(); 2581 } 2582 } 2583 2584 void InstanceKlass::init_shared_package_entry() { 2585 assert(CDSConfig::is_dumping_archive(), "must be"); 2586 #if !INCLUDE_CDS_JAVA_HEAP 2587 _package_entry = nullptr; 2588 #else 2589 if (CDSConfig::is_dumping_full_module_graph()) { 2590 if (is_shared_unregistered_class()) { 2591 _package_entry = nullptr; 2592 } else { 2593 _package_entry = PackageEntry::get_archived_entry(_package_entry); 2594 } 2595 } else if (CDSConfig::is_dumping_dynamic_archive() && 2596 CDSConfig::is_using_full_module_graph() && 2597 MetaspaceShared::is_in_shared_metaspace(_package_entry)) { 2598 // _package_entry is an archived package in the base archive. Leave it as is. 2599 } else { 2600 _package_entry = nullptr; 2601 } 2602 ArchivePtrMarker::mark_pointer((address**)&_package_entry); 2603 #endif 2604 } 2605 2606 void InstanceKlass::compute_has_loops_flag_for_methods() { 2607 Array<Method*>* methods = this->methods(); 2608 for (int index = 0; index < methods->length(); ++index) { 2609 Method* m = methods->at(index); 2610 if (!m->is_overpass()) { // work around JDK-8305771 2611 m->compute_has_loops_flag(); 2612 } 2613 } 2614 } 2615 2616 void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, 2617 PackageEntry* pkg_entry, TRAPS) { 2618 // InstanceKlass::add_to_hierarchy() sets the init_state to loaded 2619 // before the InstanceKlass is added to the SystemDictionary. Make 2620 // sure the current state is <loaded. 2621 assert(!is_loaded(), "invalid init state"); 2622 assert(!shared_loading_failed(), "Must not try to load failed class again"); 2623 set_package(loader_data, pkg_entry, CHECK); 2624 Klass::restore_unshareable_info(loader_data, protection_domain, CHECK); 2625 2626 Array<Method*>* methods = this->methods(); 2627 int num_methods = methods->length(); 2628 for (int index = 0; index < num_methods; ++index) { 2629 methods->at(index)->restore_unshareable_info(CHECK); 2630 } 2631 #if INCLUDE_JVMTI 2632 if (JvmtiExport::has_redefined_a_class()) { 2633 // Reinitialize vtable because RedefineClasses may have changed some 2634 // entries in this vtable for super classes so the CDS vtable might 2635 // point to old or obsolete entries. RedefineClasses doesn't fix up 2636 // vtables in the shared system dictionary, only the main one. 2637 // It also redefines the itable too so fix that too. 
2638 // First fix any default methods that point to a super class that may 2639 // have been redefined. 2640 bool trace_name_printed = false; 2641 adjust_default_methods(&trace_name_printed); 2642 vtable().initialize_vtable(); 2643 itable().initialize_itable(); 2644 } 2645 #endif 2646 2647 // restore constant pool resolved references 2648 constants()->restore_unshareable_info(CHECK); 2649 2650 if (array_klasses() != nullptr) { 2651 // To get a consistent list of classes we need MultiArray_lock to ensure 2652 // array classes aren't observed while they are being restored. 2653 RecursiveLocker rl(MultiArray_lock, THREAD); 2654 assert(this == array_klasses()->bottom_klass(), "sanity"); 2655 // Array classes have null protection domain. 2656 // --> see ArrayKlass::complete_create_array_klass() 2657 array_klasses()->restore_unshareable_info(class_loader_data(), Handle(), CHECK); 2658 } 2659 2660 // Initialize @ValueBased class annotation if not already set in the archived klass. 2661 if (DiagnoseSyncOnValueBasedClasses && has_value_based_class_annotation() && !is_value_based()) { 2662 set_is_value_based(); 2663 } 2664 } 2665 2666 // Check if a class or any of its supertypes has a version older than 50. 2667 // CDS will not perform verification of old classes during dump time because 2668 // without changing the old verifier, the verification constraint cannot be 2669 // retrieved during dump time. 2670 // Verification of archived old classes will be performed during run time. 2671 bool InstanceKlass::can_be_verified_at_dumptime() const { 2672 if (MetaspaceShared::is_in_shared_metaspace(this)) { 2673 // This is a class that was dumped into the base archive, so we know 2674 // it was verified at dump time. 2675 return true; 2676 } 2677 if (major_version() < 50 /*JAVA_6_VERSION*/) { 2678 return false; 2679 } 2680 if (java_super() != nullptr && !java_super()->can_be_verified_at_dumptime()) { 2681 return false; 2682 } 2683 Array<InstanceKlass*>* interfaces = local_interfaces(); 2684 int len = interfaces->length(); 2685 for (int i = 0; i < len; i++) { 2686 if (!interfaces->at(i)->can_be_verified_at_dumptime()) { 2687 return false; 2688 } 2689 } 2690 return true; 2691 } 2692 #endif // INCLUDE_CDS 2693 2694 #if INCLUDE_JVMTI 2695 static void clear_all_breakpoints(Method* m) { 2696 m->clear_all_breakpoints(); 2697 } 2698 #endif 2699 2700 void InstanceKlass::unload_class(InstanceKlass* ik) { 2701 2702 if (ik->is_scratch_class()) { 2703 assert(ik->dependencies().is_empty(), "dependencies should be empty for scratch classes"); 2704 return; 2705 } 2706 assert(ik->is_loaded(), "class should be loaded " PTR_FORMAT, p2i(ik)); 2707 2708 // Release dependencies. 
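  // remove_all_dependents() frees the dependency context entries so the
  // unloading klass no longer references any nmethods.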
2709 ik->dependencies().remove_all_dependents(); 2710 2711 // notify the debugger 2712 if (JvmtiExport::should_post_class_unload()) { 2713 JvmtiExport::post_class_unload(ik); 2714 } 2715 2716 // notify ClassLoadingService of class unload 2717 ClassLoadingService::notify_class_unloaded(ik); 2718 2719 SystemDictionaryShared::handle_class_unloading(ik); 2720 2721 if (log_is_enabled(Info, class, unload)) { 2722 ResourceMark rm; 2723 log_info(class, unload)("unloading class %s " PTR_FORMAT, ik->external_name(), p2i(ik)); 2724 } 2725 2726 Events::log_class_unloading(Thread::current(), ik); 2727 2728 #if INCLUDE_JFR 2729 assert(ik != nullptr, "invariant"); 2730 EventClassUnload event; 2731 event.set_unloadedClass(ik); 2732 event.set_definingClassLoader(ik->class_loader_data()); 2733 event.commit(); 2734 #endif 2735 } 2736 2737 static void method_release_C_heap_structures(Method* m) { 2738 m->release_C_heap_structures(); 2739 } 2740 2741 // Called also by InstanceKlass::deallocate_contents, with false for release_sub_metadata. 2742 void InstanceKlass::release_C_heap_structures(bool release_sub_metadata) { 2743 // Clean up C heap 2744 Klass::release_C_heap_structures(); 2745 2746 // Deallocate and call destructors for MDO mutexes 2747 if (release_sub_metadata) { 2748 methods_do(method_release_C_heap_structures); 2749 } 2750 2751 // Deallocate oop map cache 2752 if (_oop_map_cache != nullptr) { 2753 delete _oop_map_cache; 2754 _oop_map_cache = nullptr; 2755 } 2756 2757 // Deallocate JNI identifiers for jfieldIDs 2758 JNIid::deallocate(jni_ids()); 2759 set_jni_ids(nullptr); 2760 2761 jmethodID* jmeths = methods_jmethod_ids_acquire(); 2762 if (jmeths != nullptr) { 2763 release_set_methods_jmethod_ids(nullptr); 2764 FreeHeap(jmeths); 2765 } 2766 2767 assert(_dep_context == nullptr, 2768 "dependencies should already be cleaned"); 2769 2770 #if INCLUDE_JVMTI 2771 // Deallocate breakpoint records 2772 if (breakpoints() != nullptr) { 2773 methods_do(clear_all_breakpoints); 2774 assert(breakpoints() == nullptr, "should have cleared breakpoints"); 2775 } 2776 2777 // deallocate the cached class file 2778 if (_cached_class_file != nullptr) { 2779 os::free(_cached_class_file); 2780 _cached_class_file = nullptr; 2781 } 2782 #endif 2783 2784 FREE_C_HEAP_ARRAY(char, _source_debug_extension); 2785 2786 if (release_sub_metadata) { 2787 constants()->release_C_heap_structures(); 2788 } 2789 } 2790 2791 // The constant pool is on stack if any of the methods are executing or 2792 // referenced by handles. 
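// This matters during class redefinition: an old version whose constant pool
// is still on stack cannot be purged yet.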
2793 bool InstanceKlass::on_stack() const { 2794 return _constants->on_stack(); 2795 } 2796 2797 Symbol* InstanceKlass::source_file_name() const { return _constants->source_file_name(); } 2798 u2 InstanceKlass::source_file_name_index() const { return _constants->source_file_name_index(); } 2799 void InstanceKlass::set_source_file_name_index(u2 sourcefile_index) { _constants->set_source_file_name_index(sourcefile_index); } 2800 2801 // minor and major version numbers of class file 2802 u2 InstanceKlass::minor_version() const { return _constants->minor_version(); } 2803 void InstanceKlass::set_minor_version(u2 minor_version) { _constants->set_minor_version(minor_version); } 2804 u2 InstanceKlass::major_version() const { return _constants->major_version(); } 2805 void InstanceKlass::set_major_version(u2 major_version) { _constants->set_major_version(major_version); } 2806 2807 InstanceKlass* InstanceKlass::get_klass_version(int version) { 2808 for (InstanceKlass* ik = this; ik != nullptr; ik = ik->previous_versions()) { 2809 if (ik->constants()->version() == version) { 2810 return ik; 2811 } 2812 } 2813 return nullptr; 2814 } 2815 2816 void InstanceKlass::set_source_debug_extension(const char* array, int length) { 2817 if (array == nullptr) { 2818 _source_debug_extension = nullptr; 2819 } else { 2820 // Adding one to the attribute length in order to store a null terminator 2821 // character could cause an overflow because the attribute length is 2822 // already coded with an u4 in the classfile, but in practice, it's 2823 // unlikely to happen. 2824 assert((length+1) > length, "Overflow checking"); 2825 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass); 2826 for (int i = 0; i < length; i++) { 2827 sde[i] = array[i]; 2828 } 2829 sde[length] = '\0'; 2830 _source_debug_extension = sde; 2831 } 2832 } 2833 2834 Symbol* InstanceKlass::generic_signature() const { return _constants->generic_signature(); } 2835 u2 InstanceKlass::generic_signature_index() const { return _constants->generic_signature_index(); } 2836 void InstanceKlass::set_generic_signature_index(u2 sig_index) { _constants->set_generic_signature_index(sig_index); } 2837 2838 const char* InstanceKlass::signature_name() const { 2839 2840 // Get the internal name as a c string 2841 const char* src = (const char*) (name()->as_C_string()); 2842 const int src_length = (int)strlen(src); 2843 2844 char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3); 2845 2846 // Add L as type indicator 2847 int dest_index = 0; 2848 dest[dest_index++] = JVM_SIGNATURE_CLASS; 2849 2850 // Add the actual class name 2851 for (int src_index = 0; src_index < src_length; ) { 2852 dest[dest_index++] = src[src_index++]; 2853 } 2854 2855 if (is_hidden()) { // Replace the last '+' with a '.'. 2856 for (int index = (int)src_length; index > 0; index--) { 2857 if (dest[index] == '+') { 2858 dest[index] = JVM_SIGNATURE_DOT; 2859 break; 2860 } 2861 } 2862 } 2863 2864 // Add the semicolon and the null 2865 dest[dest_index++] = JVM_SIGNATURE_ENDCLASS; 2866 dest[dest_index] = '\0'; 2867 return dest; 2868 } 2869 2870 ModuleEntry* InstanceKlass::module() const { 2871 if (is_hidden() && 2872 in_unnamed_package() && 2873 class_loader_data()->has_class_mirror_holder()) { 2874 // For a non-strong hidden class defined to an unnamed package, 2875 // its (class held) CLD will not have an unnamed module created for it. 2876 // Two choices to find the correct ModuleEntry: 2877 // 1. If hidden class is within a nest, use nest host's module 2878 // 2. 
Find the unnamed module off from the class loader 2879 // For now option #2 is used since a nest host is not set until 2880 // after the instance class is created in jvm_lookup_define_class(). 2881 if (class_loader_data()->is_boot_class_loader_data()) { 2882 return ClassLoaderData::the_null_class_loader_data()->unnamed_module(); 2883 } else { 2884 oop module = java_lang_ClassLoader::unnamedModule(class_loader_data()->class_loader()); 2885 assert(java_lang_Module::is_instance(module), "Not an instance of java.lang.Module"); 2886 return java_lang_Module::module_entry(module); 2887 } 2888 } 2889 2890 // Class is in a named package 2891 if (!in_unnamed_package()) { 2892 return _package_entry->module(); 2893 } 2894 2895 // Class is in an unnamed package, return its loader's unnamed module 2896 return class_loader_data()->unnamed_module(); 2897 } 2898 2899 void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_entry, TRAPS) { 2900 2901 // ensure java/ packages only loaded by boot or platform builtin loaders 2902 // not needed for shared class since CDS does not archive prohibited classes. 2903 if (!is_shared()) { 2904 check_prohibited_package(name(), loader_data, CHECK); 2905 } 2906 2907 if (is_shared() && _package_entry != nullptr) { 2908 if (CDSConfig::is_using_full_module_graph() && _package_entry == pkg_entry) { 2909 // we can use the saved package 2910 assert(MetaspaceShared::is_in_shared_metaspace(_package_entry), "must be"); 2911 return; 2912 } else { 2913 _package_entry = nullptr; 2914 } 2915 } 2916 2917 // ClassLoader::package_from_class_name has already incremented the refcount of the symbol 2918 // it returns, so we need to decrement it when the current function exits. 2919 TempNewSymbol from_class_name = 2920 (pkg_entry != nullptr) ? nullptr : ClassLoader::package_from_class_name(name()); 2921 2922 Symbol* pkg_name; 2923 if (pkg_entry != nullptr) { 2924 pkg_name = pkg_entry->name(); 2925 } else { 2926 pkg_name = from_class_name; 2927 } 2928 2929 if (pkg_name != nullptr && loader_data != nullptr) { 2930 2931 // Find in class loader's package entry table. 2932 _package_entry = pkg_entry != nullptr ? pkg_entry : loader_data->packages()->lookup_only(pkg_name); 2933 2934 // If the package name is not found in the loader's package 2935 // entry table, it is an indication that the package has not 2936 // been defined. Consider it defined within the unnamed module. 2937 if (_package_entry == nullptr) { 2938 2939 if (!ModuleEntryTable::javabase_defined()) { 2940 // Before java.base is defined during bootstrapping, define all packages in 2941 // the java.base module. If a non-java.base package is erroneously placed 2942 // in the java.base module it will be caught later when java.base 2943 // is defined by ModuleEntryTable::verify_javabase_packages check. 
2944 assert(ModuleEntryTable::javabase_moduleEntry() != nullptr, JAVA_BASE_NAME " module is null"); 2945 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, ModuleEntryTable::javabase_moduleEntry()); 2946 } else { 2947 assert(loader_data->unnamed_module() != nullptr, "unnamed module is null"); 2948 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, loader_data->unnamed_module()); 2949 } 2950 2951 // A package should have been successfully created 2952 DEBUG_ONLY(ResourceMark rm(THREAD)); 2953 assert(_package_entry != nullptr, "Package entry for class %s not found, loader %s", 2954 name()->as_C_string(), loader_data->loader_name_and_id()); 2955 } 2956 2957 if (log_is_enabled(Debug, module)) { 2958 ResourceMark rm(THREAD); 2959 ModuleEntry* m = _package_entry->module(); 2960 log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s", 2961 external_name(), 2962 pkg_name->as_C_string(), 2963 loader_data->loader_name_and_id(), 2964 (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE)); 2965 } 2966 } else { 2967 ResourceMark rm(THREAD); 2968 log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s", 2969 external_name(), 2970 (loader_data != nullptr) ? loader_data->loader_name_and_id() : "null", 2971 UNNAMED_MODULE); 2972 } 2973 } 2974 2975 // Function set_classpath_index ensures that for a non-null _package_entry 2976 // of the InstanceKlass, the entry is in the boot loader's package entry table. 2977 // It then sets the classpath_index in the package entry record. 2978 // 2979 // The classpath_index field is used to find the entry on the boot loader class 2980 // path for packages with classes loaded by the boot loader from -Xbootclasspath/a 2981 // in an unnamed module. It is also used to indicate (for all packages whose 2982 // classes are loaded by the boot loader) that at least one of the package's 2983 // classes has been loaded. 2984 void InstanceKlass::set_classpath_index(s2 path_index) { 2985 if (_package_entry != nullptr) { 2986 DEBUG_ONLY(PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data()->packages();) 2987 assert(pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry, "Should be same"); 2988 assert(path_index != -1, "Unexpected classpath_index"); 2989 _package_entry->set_classpath_index(path_index); 2990 } 2991 } 2992 2993 // different versions of is_same_class_package 2994 2995 bool InstanceKlass::is_same_class_package(const Klass* class2) const { 2996 oop classloader1 = this->class_loader(); 2997 PackageEntry* classpkg1 = this->package(); 2998 if (class2->is_objArray_klass()) { 2999 class2 = ObjArrayKlass::cast(class2)->bottom_klass(); 3000 } 3001 3002 oop classloader2; 3003 PackageEntry* classpkg2; 3004 if (class2->is_instance_klass()) { 3005 classloader2 = class2->class_loader(); 3006 classpkg2 = class2->package(); 3007 } else { 3008 assert(class2->is_typeArray_klass(), "should be type array"); 3009 classloader2 = nullptr; 3010 classpkg2 = nullptr; 3011 } 3012 3013 // Same package is determined by comparing class loader 3014 // and package entries. Both must be the same. This rule 3015 // applies even to classes that are defined in the unnamed 3016 // package, they still must have the same class loader. 3017 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) { 3018 return true; 3019 } 3020 3021 return false; 3022 } 3023 3024 // return true if this class and other_class are in the same package. 
Classloader 3025 // and classname information is enough to determine a class's package 3026 bool InstanceKlass::is_same_class_package(oop other_class_loader, 3027 const Symbol* other_class_name) const { 3028 if (class_loader() != other_class_loader) { 3029 return false; 3030 } 3031 if (name()->fast_compare(other_class_name) == 0) { 3032 return true; 3033 } 3034 3035 { 3036 ResourceMark rm; 3037 3038 bool bad_class_name = false; 3039 TempNewSymbol other_pkg = ClassLoader::package_from_class_name(other_class_name, &bad_class_name); 3040 if (bad_class_name) { 3041 return false; 3042 } 3043 // Check that package_from_class_name() returns null, not "", if there is no package. 3044 assert(other_pkg == nullptr || other_pkg->utf8_length() > 0, "package name is empty string"); 3045 3046 const Symbol* const this_package_name = 3047 this->package() != nullptr ? this->package()->name() : nullptr; 3048 3049 if (this_package_name == nullptr || other_pkg == nullptr) { 3050 // One of the two doesn't have a package. Only return true if the other 3051 // one also doesn't have a package. 3052 return this_package_name == other_pkg; 3053 } 3054 3055 // Check if package is identical 3056 return this_package_name->fast_compare(other_pkg) == 0; 3057 } 3058 } 3059 3060 static bool is_prohibited_package_slow(Symbol* class_name) { 3061 // Caller has ResourceMark 3062 int length; 3063 jchar* unicode = class_name->as_unicode(length); 3064 return (length >= 5 && 3065 unicode[0] == 'j' && 3066 unicode[1] == 'a' && 3067 unicode[2] == 'v' && 3068 unicode[3] == 'a' && 3069 unicode[4] == '/'); 3070 } 3071 3072 // Only boot and platform class loaders can define classes in "java/" packages. 3073 void InstanceKlass::check_prohibited_package(Symbol* class_name, 3074 ClassLoaderData* loader_data, 3075 TRAPS) { 3076 if (!loader_data->is_boot_class_loader_data() && 3077 !loader_data->is_platform_class_loader_data() && 3078 class_name != nullptr && class_name->utf8_length() >= 5) { 3079 ResourceMark rm(THREAD); 3080 bool prohibited; 3081 const u1* base = class_name->base(); 3082 if ((base[0] | base[1] | base[2] | base[3] | base[4]) & 0x80) { 3083 prohibited = is_prohibited_package_slow(class_name); 3084 } else { 3085 char* name = class_name->as_C_string(); 3086 prohibited = (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/'); 3087 } 3088 if (prohibited) { 3089 TempNewSymbol pkg_name = ClassLoader::package_from_class_name(class_name); 3090 assert(pkg_name != nullptr, "Error in parsing package name starting with 'java/'"); 3091 char* name = pkg_name->as_C_string(); 3092 const char* class_loader_name = loader_data->loader_name_and_id(); 3093 StringUtils::replace_no_expand(name, "/", "."); 3094 const char* msg_text1 = "Class loader (instance of): "; 3095 const char* msg_text2 = " tried to load prohibited package name: "; 3096 size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1; 3097 char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len); 3098 jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name); 3099 THROW_MSG(vmSymbols::java_lang_SecurityException(), message); 3100 } 3101 } 3102 return; 3103 } 3104 3105 bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const { 3106 constantPoolHandle i_cp(THREAD, constants()); 3107 for (InnerClassesIterator iter(this); !iter.done(); iter.next()) { 3108 int ioff = iter.inner_class_info_index(); 3109 if (ioff != 0) { 3110 // Check to see if the name matches the class we're 
looking for 3111 // before attempting to find the class. 3112 if (i_cp->klass_name_at_matches(this, ioff)) { 3113 Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false); 3114 if (this == inner_klass) { 3115 *ooff = iter.outer_class_info_index(); 3116 *noff = iter.inner_name_index(); 3117 return true; 3118 } 3119 } 3120 } 3121 } 3122 return false; 3123 } 3124 3125 InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const { 3126 InstanceKlass* outer_klass = nullptr; 3127 *inner_is_member = false; 3128 int ooff = 0, noff = 0; 3129 bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD); 3130 if (has_inner_classes_attr) { 3131 constantPoolHandle i_cp(THREAD, constants()); 3132 if (ooff != 0) { 3133 Klass* ok = i_cp->klass_at(ooff, CHECK_NULL); 3134 if (!ok->is_instance_klass()) { 3135 // If the outer class is not an instance klass then it cannot have 3136 // declared any inner classes. 3137 ResourceMark rm(THREAD); 3138 Exceptions::fthrow( 3139 THREAD_AND_LOCATION, 3140 vmSymbols::java_lang_IncompatibleClassChangeError(), 3141 "%s and %s disagree on InnerClasses attribute", 3142 ok->external_name(), 3143 external_name()); 3144 return nullptr; 3145 } 3146 outer_klass = InstanceKlass::cast(ok); 3147 *inner_is_member = true; 3148 } 3149 if (nullptr == outer_klass) { 3150 // It may be a local class; try for that. 3151 int encl_method_class_idx = enclosing_method_class_index(); 3152 if (encl_method_class_idx != 0) { 3153 Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL); 3154 outer_klass = InstanceKlass::cast(ok); 3155 *inner_is_member = false; 3156 } 3157 } 3158 } 3159 3160 // If no inner class attribute found for this class. 3161 if (nullptr == outer_klass) return nullptr; 3162 3163 // Throws an exception if outer klass has not declared k as an inner klass 3164 // We need evidence that each klass knows about the other, or else 3165 // the system could allow a spoof of an inner class to gain access rights. 3166 Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL); 3167 return outer_klass; 3168 } 3169 3170 jint InstanceKlass::compute_modifier_flags() const { 3171 jint access = access_flags().as_int(); 3172 3173 // But check if it happens to be member class. 3174 InnerClassesIterator iter(this); 3175 for (; !iter.done(); iter.next()) { 3176 int ioff = iter.inner_class_info_index(); 3177 // Inner class attribute can be zero, skip it. 3178 // Strange but true: JVM spec. allows null inner class refs. 3179 if (ioff == 0) continue; 3180 3181 // only look at classes that are already loaded 3182 // since we are looking for the flags for our self. 3183 Symbol* inner_name = constants()->klass_name_at(ioff); 3184 if (name() == inner_name) { 3185 // This is really a member class. 
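// Take the access flags recorded in the InnerClasses attribute rather than the
// class file's own flags: for a member class these carry the declared modifiers
// (for example private, protected or static) that the top-level flags cannot express.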
3186 access = iter.inner_access_flags(); 3187 break; 3188 } 3189 } 3190 // Remember to strip ACC_SUPER bit 3191 return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS; 3192 } 3193 3194 jint InstanceKlass::jvmti_class_status() const { 3195 jint result = 0; 3196 3197 if (is_linked()) { 3198 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED; 3199 } 3200 3201 if (is_initialized()) { 3202 assert(is_linked(), "Class status is not consistent"); 3203 result |= JVMTI_CLASS_STATUS_INITIALIZED; 3204 } 3205 if (is_in_error_state()) { 3206 result |= JVMTI_CLASS_STATUS_ERROR; 3207 } 3208 return result; 3209 } 3210 3211 Method* InstanceKlass::method_at_itable(InstanceKlass* holder, int index, TRAPS) { 3212 bool implements_interface; // initialized by method_at_itable_or_null 3213 Method* m = method_at_itable_or_null(holder, index, 3214 implements_interface); // out parameter 3215 if (m != nullptr) { 3216 assert(implements_interface, "sanity"); 3217 return m; 3218 } else if (implements_interface) { 3219 // Throw AbstractMethodError since corresponding itable slot is empty. 3220 THROW_NULL(vmSymbols::java_lang_AbstractMethodError()); 3221 } else { 3222 // If the interface isn't implemented by the receiver class, 3223 // the VM should throw IncompatibleClassChangeError. 3224 ResourceMark rm(THREAD); 3225 stringStream ss; 3226 bool same_module = (module() == holder->module()); 3227 ss.print("Receiver class %s does not implement " 3228 "the interface %s defining the method to be called " 3229 "(%s%s%s)", 3230 external_name(), holder->external_name(), 3231 (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(), 3232 (same_module) ? "" : "; ", 3233 (same_module) ? "" : holder->class_in_module_of_loader()); 3234 THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string()); 3235 } 3236 } 3237 3238 Method* InstanceKlass::method_at_itable_or_null(InstanceKlass* holder, int index, bool& implements_interface) { 3239 klassItable itable(this); 3240 for (int i = 0; i < itable.size_offset_table(); i++) { 3241 itableOffsetEntry* offset_entry = itable.offset_entry(i); 3242 if (offset_entry->interface_klass() == holder) { 3243 implements_interface = true; 3244 itableMethodEntry* ime = offset_entry->first_method_entry(this); 3245 Method* m = ime[index].method(); 3246 return m; 3247 } 3248 } 3249 implements_interface = false; 3250 return nullptr; // offset entry not found 3251 } 3252 3253 int InstanceKlass::vtable_index_of_interface_method(Method* intf_method) { 3254 assert(is_linked(), "required"); 3255 assert(intf_method->method_holder()->is_interface(), "not an interface method"); 3256 assert(is_subtype_of(intf_method->method_holder()), "interface not implemented"); 3257 3258 int vtable_index = Method::invalid_vtable_index; 3259 Symbol* name = intf_method->name(); 3260 Symbol* signature = intf_method->signature(); 3261 3262 // First check in default method array 3263 if (!intf_method->is_abstract() && default_methods() != nullptr) { 3264 int index = find_method_index(default_methods(), 3265 name, signature, 3266 Klass::OverpassLookupMode::find, 3267 Klass::StaticLookupMode::find, 3268 Klass::PrivateLookupMode::find); 3269 if (index >= 0) { 3270 vtable_index = default_vtable_indices()->at(index); 3271 } 3272 } 3273 if (vtable_index == Method::invalid_vtable_index) { 3274 // get vtable_index for miranda methods 3275 klassVtable vt = vtable(); 3276 vtable_index = vt.index_of_miranda(name, signature); 3277 } 3278 return vtable_index; 3279 } 3280 3281 #if 
INCLUDE_JVMTI 3282 // update default_methods for redefineclasses for methods that are 3283 // not yet in the vtable due to concurrent subclass define and superinterface 3284 // redefinition 3285 // Note: those in the vtable, should have been updated via adjust_method_entries 3286 void InstanceKlass::adjust_default_methods(bool* trace_name_printed) { 3287 // search the default_methods for uses of either obsolete or EMCP methods 3288 if (default_methods() != nullptr) { 3289 for (int index = 0; index < default_methods()->length(); index ++) { 3290 Method* old_method = default_methods()->at(index); 3291 if (old_method == nullptr || !old_method->is_old()) { 3292 continue; // skip uninteresting entries 3293 } 3294 assert(!old_method->is_deleted(), "default methods may not be deleted"); 3295 Method* new_method = old_method->get_new_method(); 3296 default_methods()->at_put(index, new_method); 3297 3298 if (log_is_enabled(Info, redefine, class, update)) { 3299 ResourceMark rm; 3300 if (!(*trace_name_printed)) { 3301 log_info(redefine, class, update) 3302 ("adjust: klassname=%s default methods from name=%s", 3303 external_name(), old_method->method_holder()->external_name()); 3304 *trace_name_printed = true; 3305 } 3306 log_debug(redefine, class, update, vtables) 3307 ("default method update: %s(%s) ", 3308 new_method->name()->as_C_string(), new_method->signature()->as_C_string()); 3309 } 3310 } 3311 } 3312 } 3313 #endif // INCLUDE_JVMTI 3314 3315 // On-stack replacement stuff 3316 void InstanceKlass::add_osr_nmethod(nmethod* n) { 3317 assert_lock_strong(NMethodState_lock); 3318 #ifndef PRODUCT 3319 nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true); 3320 assert(prev == nullptr || !prev->is_in_use() COMPILER2_PRESENT(|| StressRecompilation), 3321 "redundant OSR recompilation detected. memory leak in CodeCache!"); 3322 #endif 3323 // only one compilation can be active 3324 assert(n->is_osr_method(), "wrong kind of nmethod"); 3325 n->set_osr_link(osr_nmethods_head()); 3326 set_osr_nmethods_head(n); 3327 // Raise the highest osr level if necessary 3328 n->method()->set_highest_osr_comp_level(MAX2(n->method()->highest_osr_comp_level(), n->comp_level())); 3329 3330 // Get rid of the osr methods for the same bci that have lower levels. 3331 for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) { 3332 nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true); 3333 if (inv != nullptr && inv->is_in_use()) { 3334 inv->make_not_entrant(); 3335 } 3336 } 3337 } 3338 3339 // Remove osr nmethod from the list. Return true if found and removed. 3340 bool InstanceKlass::remove_osr_nmethod(nmethod* n) { 3341 // This is a short non-blocking critical region, so the no safepoint check is ok. 
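// ConditionalMutexLocker acquires NMethodState_lock only when the current thread
// does not already hold it, so this path is safe both for callers that have taken
// the lock and for callers that have not.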
3342 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag); 3343 assert(n->is_osr_method(), "wrong kind of nmethod"); 3344 nmethod* last = nullptr; 3345 nmethod* cur = osr_nmethods_head(); 3346 int max_level = CompLevel_none; // Find the max comp level excluding n 3347 Method* m = n->method(); 3348 // Search for match 3349 bool found = false; 3350 while(cur != nullptr && cur != n) { 3351 if (m == cur->method()) { 3352 // Find max level before n 3353 max_level = MAX2(max_level, cur->comp_level()); 3354 } 3355 last = cur; 3356 cur = cur->osr_link(); 3357 } 3358 nmethod* next = nullptr; 3359 if (cur == n) { 3360 found = true; 3361 next = cur->osr_link(); 3362 if (last == nullptr) { 3363 // Remove first element 3364 set_osr_nmethods_head(next); 3365 } else { 3366 last->set_osr_link(next); 3367 } 3368 } 3369 n->set_osr_link(nullptr); 3370 cur = next; 3371 while (cur != nullptr) { 3372 // Find max level after n 3373 if (m == cur->method()) { 3374 max_level = MAX2(max_level, cur->comp_level()); 3375 } 3376 cur = cur->osr_link(); 3377 } 3378 m->set_highest_osr_comp_level(max_level); 3379 return found; 3380 } 3381 3382 int InstanceKlass::mark_osr_nmethods(DeoptimizationScope* deopt_scope, const Method* m) { 3383 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag); 3384 nmethod* osr = osr_nmethods_head(); 3385 int found = 0; 3386 while (osr != nullptr) { 3387 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain"); 3388 if (osr->method() == m) { 3389 deopt_scope->mark(osr); 3390 found++; 3391 } 3392 osr = osr->osr_link(); 3393 } 3394 return found; 3395 } 3396 3397 nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const { 3398 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag); 3399 nmethod* osr = osr_nmethods_head(); 3400 nmethod* best = nullptr; 3401 while (osr != nullptr) { 3402 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain"); 3403 // There can be a time when a c1 osr method exists but we are waiting 3404 // for a c2 version. When c2 completes its osr nmethod we will trash 3405 // the c1 version and only be able to find the c2 version. However 3406 // while we overflow in the c1 code at back branches we don't want to 3407 // try and switch to the same code as we are already running 3408 3409 if (osr->method() == m && 3410 (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) { 3411 if (match_level) { 3412 if (osr->comp_level() == comp_level) { 3413 // Found a match - return it. 3414 return osr; 3415 } 3416 } else { 3417 if (best == nullptr || (osr->comp_level() > best->comp_level())) { 3418 if (osr->comp_level() == CompilationPolicy::highest_compile_level()) { 3419 // Found the best possible - return it. 
3420 return osr; 3421 } 3422 best = osr; 3423 } 3424 } 3425 } 3426 osr = osr->osr_link(); 3427 } 3428 3429 assert(match_level == false || best == nullptr, "shouldn't pick up anything if match_level is set"); 3430 if (best != nullptr && best->comp_level() >= comp_level) { 3431 return best; 3432 } 3433 return nullptr; 3434 } 3435 3436 // ----------------------------------------------------------------------------------------------------- 3437 // Printing 3438 3439 #define BULLET " - " 3440 3441 static const char* state_names[] = { 3442 "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error" 3443 }; 3444 3445 static void print_vtable(intptr_t* start, int len, outputStream* st) { 3446 for (int i = 0; i < len; i++) { 3447 intptr_t e = start[i]; 3448 st->print("%d : " INTPTR_FORMAT, i, e); 3449 if (MetaspaceObj::is_valid((Metadata*)e)) { 3450 st->print(" "); 3451 ((Metadata*)e)->print_value_on(st); 3452 } 3453 st->cr(); 3454 } 3455 } 3456 3457 static void print_vtable(vtableEntry* start, int len, outputStream* st) { 3458 return print_vtable(reinterpret_cast<intptr_t*>(start), len, st); 3459 } 3460 3461 const char* InstanceKlass::init_state_name() const { 3462 return state_names[init_state()]; 3463 } 3464 3465 void InstanceKlass::print_on(outputStream* st) const { 3466 assert(is_klass(), "must be klass"); 3467 Klass::print_on(st); 3468 3469 st->print(BULLET"instance size: %d", size_helper()); st->cr(); 3470 st->print(BULLET"klass size: %d", size()); st->cr(); 3471 st->print(BULLET"access: "); access_flags().print_on(st); st->cr(); 3472 st->print(BULLET"flags: "); _misc_flags.print_on(st); st->cr(); 3473 st->print(BULLET"state: "); st->print_cr("%s", init_state_name()); 3474 st->print(BULLET"name: "); name()->print_value_on(st); st->cr(); 3475 st->print(BULLET"super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr(); 3476 st->print(BULLET"sub: "); 3477 Klass* sub = subklass(); 3478 int n; 3479 for (n = 0; sub != nullptr; n++, sub = sub->next_sibling()) { 3480 if (n < MaxSubklassPrintSize) { 3481 sub->print_value_on(st); 3482 st->print(" "); 3483 } 3484 } 3485 if (n >= MaxSubklassPrintSize) st->print("(" INTX_FORMAT " more klasses...)", n - MaxSubklassPrintSize); 3486 st->cr(); 3487 3488 if (is_interface()) { 3489 st->print_cr(BULLET"nof implementors: %d", nof_implementors()); 3490 if (nof_implementors() == 1) { 3491 st->print_cr(BULLET"implementor: "); 3492 st->print(" "); 3493 implementor()->print_value_on(st); 3494 st->cr(); 3495 } 3496 } 3497 3498 st->print(BULLET"arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr(); 3499 st->print(BULLET"methods: "); methods()->print_value_on(st); st->cr(); 3500 if (Verbose || WizardMode) { 3501 Array<Method*>* method_array = methods(); 3502 for (int i = 0; i < method_array->length(); i++) { 3503 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr(); 3504 } 3505 } 3506 st->print(BULLET"method ordering: "); method_ordering()->print_value_on(st); st->cr(); 3507 if (default_methods() != nullptr) { 3508 st->print(BULLET"default_methods: "); default_methods()->print_value_on(st); st->cr(); 3509 if (Verbose) { 3510 Array<Method*>* method_array = default_methods(); 3511 for (int i = 0; i < method_array->length(); i++) { 3512 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr(); 3513 } 3514 } 3515 } 3516 print_on_maybe_null(st, BULLET"default vtable indices: ", default_vtable_indices()); 3517 st->print(BULLET"local interfaces: "); 
local_interfaces()->print_value_on(st); st->cr(); 3518 st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr(); 3519 3520 st->print(BULLET"secondary supers: "); secondary_supers()->print_value_on(st); st->cr(); 3521 3522 st->print(BULLET"hash_slot: %d", hash_slot()); st->cr(); 3523 st->print(BULLET"secondary bitmap: " UINTX_FORMAT_X_0, _secondary_supers_bitmap); st->cr(); 3524 3525 if (secondary_supers() != nullptr) { 3526 if (Verbose) { 3527 bool is_hashed = (_secondary_supers_bitmap != SECONDARY_SUPERS_BITMAP_FULL); 3528 st->print_cr(BULLET"---- secondary supers (%d words):", _secondary_supers->length()); 3529 for (int i = 0; i < _secondary_supers->length(); i++) { 3530 ResourceMark rm; // for external_name() 3531 Klass* secondary_super = _secondary_supers->at(i); 3532 st->print(BULLET"%2d:", i); 3533 if (is_hashed) { 3534 int home_slot = compute_home_slot(secondary_super, _secondary_supers_bitmap); 3535 int distance = (i - home_slot) & SECONDARY_SUPERS_TABLE_MASK; 3536 st->print(" dist:%02d:", distance); 3537 } 3538 st->print_cr(" %p %s", secondary_super, secondary_super->external_name()); 3539 } 3540 } 3541 } 3542 st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr(); 3543 3544 print_on_maybe_null(st, BULLET"class loader data: ", class_loader_data()); 3545 print_on_maybe_null(st, BULLET"source file: ", source_file_name()); 3546 if (source_debug_extension() != nullptr) { 3547 st->print(BULLET"source debug extension: "); 3548 st->print("%s", source_debug_extension()); 3549 st->cr(); 3550 } 3551 print_on_maybe_null(st, BULLET"class annotations: ", class_annotations()); 3552 print_on_maybe_null(st, BULLET"class type annotations: ", class_type_annotations()); 3553 print_on_maybe_null(st, BULLET"field annotations: ", fields_annotations()); 3554 print_on_maybe_null(st, BULLET"field type annotations: ", fields_type_annotations()); 3555 { 3556 bool have_pv = false; 3557 // previous versions are linked together through the InstanceKlass 3558 for (InstanceKlass* pv_node = previous_versions(); 3559 pv_node != nullptr; 3560 pv_node = pv_node->previous_versions()) { 3561 if (!have_pv) 3562 st->print(BULLET"previous version: "); 3563 have_pv = true; 3564 pv_node->constants()->print_value_on(st); 3565 } 3566 if (have_pv) st->cr(); 3567 } 3568 3569 print_on_maybe_null(st, BULLET"generic signature: ", generic_signature()); 3570 st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr(); 3571 st->print(BULLET"nest members: "); nest_members()->print_value_on(st); st->cr(); 3572 print_on_maybe_null(st, BULLET"record components: ", record_components()); 3573 st->print(BULLET"permitted subclasses: "); permitted_subclasses()->print_value_on(st); st->cr(); 3574 if (java_mirror() != nullptr) { 3575 st->print(BULLET"java mirror: "); 3576 java_mirror()->print_value_on(st); 3577 st->cr(); 3578 } else { 3579 st->print_cr(BULLET"java mirror: null"); 3580 } 3581 st->print(BULLET"vtable length %d (start addr: " PTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr(); 3582 if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st); 3583 st->print(BULLET"itable length %d (start addr: " PTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr(); 3584 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_itable(), itable_length(), st); 3585 st->print_cr(BULLET"---- static fields (%d words):", static_field_size()); 3586 3587 FieldPrinter 
print_static_field(st); 3588 ((InstanceKlass*)this)->do_local_static_fields(&print_static_field); 3589 st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size()); 3590 FieldPrinter print_nonstatic_field(st); 3591 InstanceKlass* ik = const_cast<InstanceKlass*>(this); 3592 ik->print_nonstatic_fields(&print_nonstatic_field); 3593 3594 st->print(BULLET"non-static oop maps: "); 3595 OopMapBlock* map = start_of_nonstatic_oop_maps(); 3596 OopMapBlock* end_map = map + nonstatic_oop_map_count(); 3597 while (map < end_map) { 3598 st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1)); 3599 map++; 3600 } 3601 st->cr(); 3602 } 3603 3604 void InstanceKlass::print_value_on(outputStream* st) const { 3605 assert(is_klass(), "must be klass"); 3606 if (Verbose || WizardMode) access_flags().print_on(st); 3607 name()->print_value_on(st); 3608 } 3609 3610 void FieldPrinter::do_field(fieldDescriptor* fd) { 3611 _st->print(BULLET); 3612 if (_obj == nullptr) { 3613 fd->print_on(_st); 3614 _st->cr(); 3615 } else { 3616 fd->print_on_for(_st, _obj); 3617 _st->cr(); 3618 } 3619 } 3620 3621 3622 void InstanceKlass::oop_print_on(oop obj, outputStream* st) { 3623 Klass::oop_print_on(obj, st); 3624 3625 if (this == vmClasses::String_klass()) { 3626 typeArrayOop value = java_lang_String::value(obj); 3627 juint length = java_lang_String::length(obj); 3628 if (value != nullptr && 3629 value->is_typeArray() && 3630 length <= (juint) value->length()) { 3631 st->print(BULLET"string: "); 3632 java_lang_String::print(obj, st); 3633 st->cr(); 3634 } 3635 } 3636 3637 st->print_cr(BULLET"---- fields (total size " SIZE_FORMAT " words):", oop_size(obj)); 3638 FieldPrinter print_field(st, obj); 3639 print_nonstatic_fields(&print_field); 3640 3641 if (this == vmClasses::Class_klass()) { 3642 st->print(BULLET"signature: "); 3643 java_lang_Class::print_signature(obj, st); 3644 st->cr(); 3645 Klass* real_klass = java_lang_Class::as_Klass(obj); 3646 if (real_klass != nullptr && real_klass->is_instance_klass()) { 3647 st->print_cr(BULLET"---- static fields (%d):", java_lang_Class::static_oop_field_count(obj)); 3648 InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field); 3649 } 3650 } else if (this == vmClasses::MethodType_klass()) { 3651 st->print(BULLET"signature: "); 3652 java_lang_invoke_MethodType::print_signature(obj, st); 3653 st->cr(); 3654 } 3655 } 3656 3657 #ifndef PRODUCT 3658 3659 bool InstanceKlass::verify_itable_index(int i) { 3660 int method_count = klassItable::method_count_for_interface(this); 3661 assert(i >= 0 && i < method_count, "index out of bounds"); 3662 return true; 3663 } 3664 3665 #endif //PRODUCT 3666 3667 void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) { 3668 st->print("a "); 3669 name()->print_value_on(st); 3670 obj->print_address_on(st); 3671 if (this == vmClasses::String_klass() 3672 && java_lang_String::value(obj) != nullptr) { 3673 ResourceMark rm; 3674 int len = java_lang_String::length(obj); 3675 int plen = (len < 24 ? len : 12); 3676 char* str = java_lang_String::as_utf8_string(obj, 0, plen); 3677 st->print(" = \"%s\"", str); 3678 if (len > plen) 3679 st->print("...[%d]", len); 3680 } else if (this == vmClasses::Class_klass()) { 3681 Klass* k = java_lang_Class::as_Klass(obj); 3682 st->print(" = "); 3683 if (k != nullptr) { 3684 k->print_value_on(st); 3685 } else { 3686 const char* tname = type2name(java_lang_Class::primitive_type(obj)); 3687 st->print("%s", tname ? 
tname : "type?"); 3688 } 3689 } else if (this == vmClasses::MethodType_klass()) { 3690 st->print(" = "); 3691 java_lang_invoke_MethodType::print_signature(obj, st); 3692 } else if (java_lang_boxing_object::is_instance(obj)) { 3693 st->print(" = "); 3694 java_lang_boxing_object::print(obj, st); 3695 } else if (this == vmClasses::LambdaForm_klass()) { 3696 oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj); 3697 if (vmentry != nullptr) { 3698 st->print(" => "); 3699 vmentry->print_value_on(st); 3700 } 3701 } else if (this == vmClasses::MemberName_klass()) { 3702 Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj); 3703 if (vmtarget != nullptr) { 3704 st->print(" = "); 3705 vmtarget->print_value_on(st); 3706 } else { 3707 oop clazz = java_lang_invoke_MemberName::clazz(obj); 3708 oop name = java_lang_invoke_MemberName::name(obj); 3709 if (clazz != nullptr) { 3710 clazz->print_value_on(st); 3711 } else { 3712 st->print("null"); 3713 } 3714 st->print("."); 3715 if (name != nullptr) { 3716 name->print_value_on(st); 3717 } else { 3718 st->print("null"); 3719 } 3720 } 3721 } 3722 } 3723 3724 const char* InstanceKlass::internal_name() const { 3725 return external_name(); 3726 } 3727 3728 void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data, 3729 const ModuleEntry* module_entry, 3730 const ClassFileStream* cfs) const { 3731 3732 if (ClassListWriter::is_enabled()) { 3733 ClassListWriter::write(this, cfs); 3734 } 3735 3736 print_class_load_helper(loader_data, module_entry, cfs); 3737 print_class_load_cause_logging(); 3738 } 3739 3740 void InstanceKlass::print_class_load_helper(ClassLoaderData* loader_data, 3741 const ModuleEntry* module_entry, 3742 const ClassFileStream* cfs) const { 3743 3744 if (!log_is_enabled(Info, class, load)) { 3745 return; 3746 } 3747 3748 ResourceMark rm; 3749 LogMessage(class, load) msg; 3750 stringStream info_stream; 3751 3752 // Name and class hierarchy info 3753 info_stream.print("%s", external_name()); 3754 3755 // Source 3756 if (cfs != nullptr) { 3757 if (cfs->source() != nullptr) { 3758 const char* module_name = (module_entry->name() == nullptr) ? UNNAMED_MODULE : module_entry->name()->as_C_string(); 3759 if (module_name != nullptr) { 3760 // When the boot loader created the stream, it didn't know the module name 3761 // yet. Let's format it now. 3762 if (cfs->from_boot_loader_modules_image()) { 3763 info_stream.print(" source: jrt:/%s", module_name); 3764 } else { 3765 info_stream.print(" source: %s", cfs->source()); 3766 } 3767 } else { 3768 info_stream.print(" source: %s", cfs->source()); 3769 } 3770 } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) { 3771 Thread* current = Thread::current(); 3772 Klass* caller = current->is_Java_thread() ? 
3773 JavaThread::cast(current)->security_get_caller_class(1): 3774 nullptr; 3775 // caller can be null, for example, during a JVMTI VM_Init hook 3776 if (caller != nullptr) { 3777 info_stream.print(" source: instance of %s", caller->external_name()); 3778 } else { 3779 // source is unknown 3780 } 3781 } else { 3782 oop class_loader = loader_data->class_loader(); 3783 info_stream.print(" source: %s", class_loader->klass()->external_name()); 3784 } 3785 } else { 3786 assert(this->is_shared(), "must be"); 3787 if (MetaspaceShared::is_shared_dynamic((void*)this)) { 3788 info_stream.print(" source: shared objects file (top)"); 3789 } else { 3790 info_stream.print(" source: shared objects file"); 3791 } 3792 } 3793 3794 msg.info("%s", info_stream.as_string()); 3795 3796 if (log_is_enabled(Debug, class, load)) { 3797 stringStream debug_stream; 3798 3799 // Class hierarchy info 3800 debug_stream.print(" klass: " PTR_FORMAT " super: " PTR_FORMAT, 3801 p2i(this), p2i(superklass())); 3802 3803 // Interfaces 3804 if (local_interfaces() != nullptr && local_interfaces()->length() > 0) { 3805 debug_stream.print(" interfaces:"); 3806 int length = local_interfaces()->length(); 3807 for (int i = 0; i < length; i++) { 3808 debug_stream.print(" " PTR_FORMAT, 3809 p2i(InstanceKlass::cast(local_interfaces()->at(i)))); 3810 } 3811 } 3812 3813 // Class loader 3814 debug_stream.print(" loader: ["); 3815 loader_data->print_value_on(&debug_stream); 3816 debug_stream.print("]"); 3817 3818 // Classfile checksum 3819 if (cfs) { 3820 debug_stream.print(" bytes: %d checksum: %08x", 3821 cfs->length(), 3822 ClassLoader::crc32(0, (const char*)cfs->buffer(), 3823 cfs->length())); 3824 } 3825 3826 msg.debug("%s", debug_stream.as_string()); 3827 } 3828 } 3829 3830 void InstanceKlass::print_class_load_cause_logging() const { 3831 bool log_cause_native = log_is_enabled(Info, class, load, cause, native); 3832 if (log_cause_native || log_is_enabled(Info, class, load, cause)) { 3833 JavaThread* current = JavaThread::current(); 3834 ResourceMark rm(current); 3835 const char* name = external_name(); 3836 3837 if (LogClassLoadingCauseFor == nullptr || 3838 (strcmp("*", LogClassLoadingCauseFor) != 0 && 3839 strstr(name, LogClassLoadingCauseFor) == nullptr)) { 3840 return; 3841 } 3842 3843 // Log Java stack first 3844 { 3845 LogMessage(class, load, cause) msg; 3846 NonInterleavingLogStream info_stream{LogLevelType::Info, msg}; 3847 3848 info_stream.print_cr("Java stack when loading %s:", name); 3849 current->print_stack_on(&info_stream); 3850 } 3851 3852 // Log native stack second 3853 if (log_cause_native) { 3854 // Log to string first so that lines can be indented 3855 stringStream stack_stream; 3856 char buf[O_BUFLEN]; 3857 address lastpc = nullptr; 3858 if (os::platform_print_native_stack(&stack_stream, nullptr, buf, O_BUFLEN, lastpc)) { 3859 // We have printed the native stack in platform-specific code, 3860 // so nothing else to do in this case. 
3861 } else { 3862 frame f = os::current_frame(); 3863 VMError::print_native_stack(&stack_stream, f, current, true /*print_source_info */, 3864 -1 /* max stack_stream */, buf, O_BUFLEN); 3865 } 3866 3867 LogMessage(class, load, cause, native) msg; 3868 NonInterleavingLogStream info_stream{LogLevelType::Info, msg}; 3869 info_stream.print_cr("Native stack when loading %s:", name); 3870 3871 // Print each native stack line to the log 3872 int size = (int) stack_stream.size(); 3873 char* stack = stack_stream.as_string(); 3874 char* stack_end = stack + size; 3875 char* line_start = stack; 3876 for (char* p = stack; p < stack_end; p++) { 3877 if (*p == '\n') { 3878 *p = '\0'; 3879 info_stream.print_cr("\t%s", line_start); 3880 line_start = p + 1; 3881 } 3882 } 3883 if (line_start < stack_end) { 3884 info_stream.print_cr("\t%s", line_start); 3885 } 3886 } 3887 } 3888 } 3889 3890 // Verification 3891 3892 class VerifyFieldClosure: public BasicOopIterateClosure { 3893 protected: 3894 template <class T> void do_oop_work(T* p) { 3895 oop obj = RawAccess<>::oop_load(p); 3896 if (!oopDesc::is_oop_or_null(obj)) { 3897 tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj)); 3898 Universe::print_on(tty); 3899 guarantee(false, "boom"); 3900 } 3901 } 3902 public: 3903 virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); } 3904 virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); } 3905 }; 3906 3907 void InstanceKlass::verify_on(outputStream* st) { 3908 #ifndef PRODUCT 3909 // Avoid redundant verifies, this really should be in product. 3910 if (_verify_count == Universe::verify_count()) return; 3911 _verify_count = Universe::verify_count(); 3912 #endif 3913 3914 // Verify Klass 3915 Klass::verify_on(st); 3916 3917 // Verify that klass is present in ClassLoaderData 3918 guarantee(class_loader_data()->contains_klass(this), 3919 "this class isn't found in class loader data"); 3920 3921 // Verify vtables 3922 if (is_linked()) { 3923 // $$$ This used to be done only for m/s collections. Doing it 3924 // always seemed a valid generalization. 
(DLD -- 6/00) 3925 vtable().verify(st); 3926 } 3927 3928 // Verify first subklass 3929 if (subklass() != nullptr) { 3930 guarantee(subklass()->is_klass(), "should be klass"); 3931 } 3932 3933 // Verify siblings 3934 Klass* super = this->super(); 3935 Klass* sib = next_sibling(); 3936 if (sib != nullptr) { 3937 if (sib == this) { 3938 fatal("subclass points to itself " PTR_FORMAT, p2i(sib)); 3939 } 3940 3941 guarantee(sib->is_klass(), "should be klass"); 3942 guarantee(sib->super() == super, "siblings should have same superklass"); 3943 } 3944 3945 // Verify local interfaces 3946 if (local_interfaces()) { 3947 Array<InstanceKlass*>* local_interfaces = this->local_interfaces(); 3948 for (int j = 0; j < local_interfaces->length(); j++) { 3949 InstanceKlass* e = local_interfaces->at(j); 3950 guarantee(e->is_klass() && e->is_interface(), "invalid local interface"); 3951 } 3952 } 3953 3954 // Verify transitive interfaces 3955 if (transitive_interfaces() != nullptr) { 3956 Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces(); 3957 for (int j = 0; j < transitive_interfaces->length(); j++) { 3958 InstanceKlass* e = transitive_interfaces->at(j); 3959 guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface"); 3960 } 3961 } 3962 3963 // Verify methods 3964 if (methods() != nullptr) { 3965 Array<Method*>* methods = this->methods(); 3966 for (int j = 0; j < methods->length(); j++) { 3967 guarantee(methods->at(j)->is_method(), "non-method in methods array"); 3968 } 3969 for (int j = 0; j < methods->length() - 1; j++) { 3970 Method* m1 = methods->at(j); 3971 Method* m2 = methods->at(j + 1); 3972 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly"); 3973 } 3974 } 3975 3976 // Verify method ordering 3977 if (method_ordering() != nullptr) { 3978 Array<int>* method_ordering = this->method_ordering(); 3979 int length = method_ordering->length(); 3980 if (JvmtiExport::can_maintain_original_method_order() || 3981 ((CDSConfig::is_using_archive() || CDSConfig::is_dumping_archive()) && length != 0)) { 3982 guarantee(length == methods()->length(), "invalid method ordering length"); 3983 jlong sum = 0; 3984 for (int j = 0; j < length; j++) { 3985 int original_index = method_ordering->at(j); 3986 guarantee(original_index >= 0, "invalid method ordering index"); 3987 guarantee(original_index < length, "invalid method ordering index"); 3988 sum += original_index; 3989 } 3990 // Verify sum of indices 0,1,...,length-1 3991 guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum"); 3992 } else { 3993 guarantee(length == 0, "invalid method ordering length"); 3994 } 3995 } 3996 3997 // Verify default methods 3998 if (default_methods() != nullptr) { 3999 Array<Method*>* methods = this->default_methods(); 4000 for (int j = 0; j < methods->length(); j++) { 4001 guarantee(methods->at(j)->is_method(), "non-method in methods array"); 4002 } 4003 for (int j = 0; j < methods->length() - 1; j++) { 4004 Method* m1 = methods->at(j); 4005 Method* m2 = methods->at(j + 1); 4006 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly"); 4007 } 4008 } 4009 4010 // Verify JNI static field identifiers 4011 if (jni_ids() != nullptr) { 4012 jni_ids()->verify(this); 4013 } 4014 4015 // Verify other fields 4016 if (constants() != nullptr) { 4017 guarantee(constants()->is_constantPool(), "should be constant pool"); 4018 } 4019 } 4020 4021 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) { 4022 
Klass::oop_verify_on(obj, st); 4023 VerifyFieldClosure blk; 4024 obj->oop_iterate(&blk); 4025 } 4026 4027 4028 // JNIid class for jfieldIDs only 4029 // Note to reviewers: 4030 // These JNI functions are just moved over to column 1 and not changed 4031 // in the compressed oops workspace. 4032 JNIid::JNIid(Klass* holder, int offset, JNIid* next) { 4033 _holder = holder; 4034 _offset = offset; 4035 _next = next; 4036 debug_only(_is_static_field_id = false;) 4037 } 4038 4039 4040 JNIid* JNIid::find(int offset) { 4041 JNIid* current = this; 4042 while (current != nullptr) { 4043 if (current->offset() == offset) return current; 4044 current = current->next(); 4045 } 4046 return nullptr; 4047 } 4048 4049 void JNIid::deallocate(JNIid* current) { 4050 while (current != nullptr) { 4051 JNIid* next = current->next(); 4052 delete current; 4053 current = next; 4054 } 4055 } 4056 4057 4058 void JNIid::verify(Klass* holder) { 4059 int first_field_offset = InstanceMirrorKlass::offset_of_static_fields(); 4060 int end_field_offset; 4061 end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize); 4062 4063 JNIid* current = this; 4064 while (current != nullptr) { 4065 guarantee(current->holder() == holder, "Invalid klass in JNIid"); 4066 #ifdef ASSERT 4067 int o = current->offset(); 4068 if (current->is_static_field_id()) { 4069 guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid"); 4070 } 4071 #endif 4072 current = current->next(); 4073 } 4074 } 4075 4076 void InstanceKlass::set_init_state(ClassState state) { 4077 #ifdef ASSERT 4078 bool good_state = is_shared() ? (_init_state <= state) 4079 : (_init_state < state); 4080 assert(good_state || state == allocated, "illegal state transition"); 4081 #endif 4082 assert(_init_thread == nullptr, "should be cleared before state change"); 4083 Atomic::release_store(&_init_state, state); 4084 } 4085 4086 #if INCLUDE_JVMTI 4087 4088 // RedefineClasses() support for previous versions 4089 4090 // Globally, there is at least one previous version of a class to walk 4091 // during class unloading, which is saved because old methods in the class 4092 // are still running. Otherwise the previous version list is cleaned up. 4093 bool InstanceKlass::_should_clean_previous_versions = false; 4094 4095 // Returns true if there are previous versions of a class for class 4096 // unloading only. Also resets the flag to false. purge_previous_version 4097 // will set the flag to true if there are any left, i.e., if there's any 4098 // work to do for next time. This is to avoid the expensive code cache 4099 // walk in CLDG::clean_deallocate_lists(). 4100 bool InstanceKlass::should_clean_previous_versions_and_reset() { 4101 bool ret = _should_clean_previous_versions; 4102 log_trace(redefine, class, iklass, purge)("Class unloading: should_clean_previous_versions = %s", 4103 ret ? "true" : "false"); 4104 _should_clean_previous_versions = false; 4105 return ret; 4106 } 4107 4108 // This nulls out jmethodIDs for all methods in 'klass' 4109 // It needs to be called explicitly for all previous versions of a class because these may not be cleaned up 4110 // during class unloading. 4111 // We can not use the jmethodID cache associated with klass directly because the 'previous' versions 4112 // do not have the jmethodID cache filled in. Instead, we need to lookup jmethodID for each method and this 4113 // is expensive - O(n) for one jmethodID lookup. For all contained methods it is O(n^2). 
4114 // The reason for expensive jmethodID lookup for each method is that there is no direct link between method and jmethodID. 4115 void InstanceKlass::clear_jmethod_ids(InstanceKlass* klass) { 4116 Array<Method*>* method_refs = klass->methods(); 4117 for (int k = 0; k < method_refs->length(); k++) { 4118 Method* method = method_refs->at(k); 4119 if (method != nullptr && method->is_obsolete()) { 4120 method->clear_jmethod_id(); 4121 } 4122 } 4123 } 4124 4125 // Purge previous versions before adding new previous versions of the class and 4126 // during class unloading. 4127 void InstanceKlass::purge_previous_version_list() { 4128 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint"); 4129 assert(has_been_redefined(), "Should only be called for main class"); 4130 4131 // Quick exit. 4132 if (previous_versions() == nullptr) { 4133 return; 4134 } 4135 4136 // This klass has previous versions so see what we can cleanup 4137 // while it is safe to do so. 4138 4139 int deleted_count = 0; // leave debugging breadcrumbs 4140 int live_count = 0; 4141 ClassLoaderData* loader_data = class_loader_data(); 4142 assert(loader_data != nullptr, "should never be null"); 4143 4144 ResourceMark rm; 4145 log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name()); 4146 4147 // previous versions are linked together through the InstanceKlass 4148 InstanceKlass* pv_node = previous_versions(); 4149 InstanceKlass* last = this; 4150 int version = 0; 4151 4152 // check the previous versions list 4153 for (; pv_node != nullptr; ) { 4154 4155 ConstantPool* pvcp = pv_node->constants(); 4156 assert(pvcp != nullptr, "cp ref was unexpectedly cleared"); 4157 4158 if (!pvcp->on_stack()) { 4159 // If the constant pool isn't on stack, none of the methods 4160 // are executing. Unlink this previous_version. 4161 // The previous version InstanceKlass is on the ClassLoaderData deallocate list 4162 // so will be deallocated during the next phase of class unloading. 4163 log_trace(redefine, class, iklass, purge) 4164 ("previous version " PTR_FORMAT " is dead.", p2i(pv_node)); 4165 // Unlink from previous version list. 4166 assert(pv_node->class_loader_data() == loader_data, "wrong loader_data"); 4167 InstanceKlass* next = pv_node->previous_versions(); 4168 clear_jmethod_ids(pv_node); // jmethodID maintenance for the unloaded class 4169 pv_node->link_previous_versions(nullptr); // point next to null 4170 last->link_previous_versions(next); 4171 // Delete this node directly. Nothing is referring to it and we don't 4172 // want it to increase the counter for metadata to delete in CLDG. 4173 MetadataFactory::free_metadata(loader_data, pv_node); 4174 pv_node = next; 4175 deleted_count++; 4176 version++; 4177 continue; 4178 } else { 4179 assert(pvcp->pool_holder() != nullptr, "Constant pool with no holder"); 4180 guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack"); 4181 live_count++; 4182 if (pvcp->is_shared()) { 4183 // Shared previous versions can never be removed so no cleaning is needed. 4184 log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is shared", p2i(pv_node)); 4185 } else { 4186 // Previous version alive, set that clean is needed for next time. 
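// The flag is read and reset by should_clean_previous_versions_and_reset() on the
// next class-unloading pass, which decides whether the code cache cleaning walk runs.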
4187 _should_clean_previous_versions = true; 4188 log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is alive", p2i(pv_node)); 4189 } 4190 } 4191 4192 // next previous version 4193 last = pv_node; 4194 pv_node = pv_node->previous_versions(); 4195 version++; 4196 } 4197 log_trace(redefine, class, iklass, purge) 4198 ("previous version stats: live=%d, deleted=%d", live_count, deleted_count); 4199 } 4200 4201 void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods, 4202 int emcp_method_count) { 4203 int obsolete_method_count = old_methods->length() - emcp_method_count; 4204 4205 if (emcp_method_count != 0 && obsolete_method_count != 0 && 4206 _previous_versions != nullptr) { 4207 // We have a mix of obsolete and EMCP methods so we have to 4208 // clear out any matching EMCP method entries the hard way. 4209 int local_count = 0; 4210 for (int i = 0; i < old_methods->length(); i++) { 4211 Method* old_method = old_methods->at(i); 4212 if (old_method->is_obsolete()) { 4213 // only obsolete methods are interesting 4214 Symbol* m_name = old_method->name(); 4215 Symbol* m_signature = old_method->signature(); 4216 4217 // previous versions are linked together through the InstanceKlass 4218 int j = 0; 4219 for (InstanceKlass* prev_version = _previous_versions; 4220 prev_version != nullptr; 4221 prev_version = prev_version->previous_versions(), j++) { 4222 4223 Array<Method*>* method_refs = prev_version->methods(); 4224 for (int k = 0; k < method_refs->length(); k++) { 4225 Method* method = method_refs->at(k); 4226 4227 if (!method->is_obsolete() && 4228 method->name() == m_name && 4229 method->signature() == m_signature) { 4230 // The current RedefineClasses() call has made all EMCP 4231 // versions of this method obsolete so mark it as obsolete 4232 log_trace(redefine, class, iklass, add) 4233 ("%s(%s): flush obsolete method @%d in version @%d", 4234 m_name->as_C_string(), m_signature->as_C_string(), k, j); 4235 4236 method->set_is_obsolete(); 4237 break; 4238 } 4239 } 4240 4241 // The previous loop may not find a matching EMCP method, but 4242 // that doesn't mean that we can optimize and not go any 4243 // further back in the PreviousVersion generations. The EMCP 4244 // method for this generation could have already been made obsolete, 4245 // but there still may be an older EMCP method that has not 4246 // been made obsolete. 4247 } 4248 4249 if (++local_count >= obsolete_method_count) { 4250 // no more obsolete methods so bail out now 4251 break; 4252 } 4253 } 4254 } 4255 } 4256 } 4257 4258 // Save the scratch_class as the previous version if any of the methods are running. 4259 // The previous_versions are used to set breakpoints in EMCP methods and they are 4260 // also used to clean MethodData links to redefined methods that are no longer running. 4261 void InstanceKlass::add_previous_version(InstanceKlass* scratch_class, 4262 int emcp_method_count) { 4263 assert(Thread::current()->is_VM_thread(), 4264 "only VMThread can add previous versions"); 4265 4266 ResourceMark rm; 4267 log_trace(redefine, class, iklass, add) 4268 ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count); 4269 4270 // Clean out old previous versions for this class 4271 purge_previous_version_list(); 4272 4273 // Mark newly obsolete methods in remaining previous versions. An EMCP method from 4274 // a previous redefinition may be made obsolete by this redefinition. 
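// (EMCP means "equivalent modulo constant pool": the redefined bytecodes are identical
// apart from constant pool indices, so old EMCP methods may keep running and are only
// marked obsolete once a later redefinition changes them.)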
4275 Array<Method*>* old_methods = scratch_class->methods(); 4276 mark_newly_obsolete_methods(old_methods, emcp_method_count); 4277 4278 // If the constant pool for this previous version of the class 4279 // is not marked as being on the stack, then none of the methods 4280 // in this previous version of the class are on the stack so 4281 // we don't need to add this as a previous version. 4282 ConstantPool* cp_ref = scratch_class->constants(); 4283 if (!cp_ref->on_stack()) { 4284 log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running"); 4285 scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class); 4286 return; 4287 } 4288 4289 // Add previous version if any methods are still running or if this is 4290 // a shared class which should never be removed. 4291 assert(scratch_class->previous_versions() == nullptr, "shouldn't have a previous version"); 4292 scratch_class->link_previous_versions(previous_versions()); 4293 link_previous_versions(scratch_class); 4294 if (cp_ref->is_shared()) { 4295 log_trace(redefine, class, iklass, add) ("scratch class added; class is shared"); 4296 } else { 4297 // We only set clean_previous_versions flag for processing during class 4298 // unloading for non-shared classes. 4299 _should_clean_previous_versions = true; 4300 log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack."); 4301 } 4302 } // end add_previous_version() 4303 4304 #endif // INCLUDE_JVMTI 4305 4306 Method* InstanceKlass::method_with_idnum(int idnum) { 4307 Method* m = nullptr; 4308 if (idnum < methods()->length()) { 4309 m = methods()->at(idnum); 4310 } 4311 if (m == nullptr || m->method_idnum() != idnum) { 4312 for (int index = 0; index < methods()->length(); ++index) { 4313 m = methods()->at(index); 4314 if (m->method_idnum() == idnum) { 4315 return m; 4316 } 4317 } 4318 // None found, return null for the caller to handle. 4319 return nullptr; 4320 } 4321 return m; 4322 } 4323 4324 4325 Method* InstanceKlass::method_with_orig_idnum(int idnum) { 4326 if (idnum >= methods()->length()) { 4327 return nullptr; 4328 } 4329 Method* m = methods()->at(idnum); 4330 if (m != nullptr && m->orig_method_idnum() == idnum) { 4331 return m; 4332 } 4333 // Obsolete method idnum does not match the original idnum 4334 for (int index = 0; index < methods()->length(); ++index) { 4335 m = methods()->at(index); 4336 if (m->orig_method_idnum() == idnum) { 4337 return m; 4338 } 4339 } 4340 // None found, return null for the caller to handle. 4341 return nullptr; 4342 } 4343 4344 4345 Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) { 4346 InstanceKlass* holder = get_klass_version(version); 4347 if (holder == nullptr) { 4348 return nullptr; // The version of klass is gone, no method is found 4349 } 4350 Method* method = holder->method_with_orig_idnum(idnum); 4351 return method; 4352 } 4353 4354 #if INCLUDE_JVMTI 4355 JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() { 4356 return _cached_class_file; 4357 } 4358 4359 jint InstanceKlass::get_cached_class_file_len() { 4360 return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file); 4361 } 4362 4363 unsigned char * InstanceKlass::get_cached_class_file_bytes() { 4364 return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file); 4365 } 4366 #endif 4367 4368 // Make a step iterating over the class hierarchy under the root class. 4369 // Skips subclasses if requested. 
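// The walk is depth-first over the subklass/next_sibling links: descend into the first
// subclass when allowed, otherwise advance to the next sibling, backtracking through
// superklass() until a sibling exists or the root is reached again, which ends the iteration.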
void ClassHierarchyIterator::next() {
4371   assert(_current != nullptr, "required");
4372   if (_visit_subclasses && _current->subklass() != nullptr) {
4373     _current = _current->subklass();
4374     return; // visit next subclass
4375   }
4376   _visit_subclasses = true; // reset
4377   while (_current->next_sibling() == nullptr && _current != _root) {
4378     _current = _current->superklass(); // backtrack; no more sibling subclasses left
4379   }
4380   if (_current == _root) {
4381     // Iteration is over (back at root after backtracking). Invalidate the iterator.
4382     _current = nullptr;
4383     return;
4384   }
4385   _current = _current->next_sibling();
4386   return; // visit next sibling subclass
4387 }
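// A minimal usage sketch for ClassHierarchyIterator, kept inside a comment so it does not
// affect this translation unit. It assumes the accessors commonly declared for the iterator
// in instanceKlass.hpp (a constructor taking the root klass, done(), klass() and
// skip_subclasses()); treat those names, and should_prune(), as assumptions to be adjusted
// to the actual declaration.
//
//   for (ClassHierarchyIterator iter(root_ik); !iter.done(); iter.next()) {
//     Klass* k = iter.klass();
//     if (should_prune(k)) {        // should_prune is a hypothetical predicate
//       iter.skip_subclasses();     // do not descend into k's subclasses
//       continue;
//     }
//     // ... process k ...
//   }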