1 /* 2 * Copyright (c) 1998, 2023, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#include "precompiled.hpp"
#include "cds/archiveBuilder.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/heapShared.hpp"
#include "classfile/resolutionErrors.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "classfile/vmClasses.hpp"
#include "code/codeCache.hpp"
#include "interpreter/bytecodeStream.hpp"
#include "interpreter/bytecodes.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/linkResolver.hpp"
#include "interpreter/rewriter.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "oops/access.inline.hpp"
#include "oops/compressedOops.hpp"
#include "oops/constantPool.inline.hpp"
#include "oops/cpCache.inline.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/resolvedFieldEntry.hpp"
#include "oops/resolvedIndyEntry.hpp"
#include "oops/resolvedMethodEntry.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomic.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/vm_version.hpp"
#include "utilities/macros.hpp"

// Implementation of ConstantPoolCache

// Copy a temporary GrowableArray of resolved entries into a permanent
// metaspace Array<T> owned by loader_data. Returns nullptr when there are
// no entries, so callers treat an absent array as "no entries of this kind".
template <class T>
static Array<T>* initialize_resolved_entries_array(ClassLoaderData* loader_data, GrowableArray<T> entries, TRAPS) {
  Array<T>* resolved_entries;
  if (entries.length() != 0) {
    resolved_entries = MetadataFactory::new_array<T>(loader_data, entries.length(), CHECK_NULL);
    for (int i = 0; i < entries.length(); i++) {
      resolved_entries->at_put(i, entries.at(i));
    }
    return resolved_entries;
  }
  return nullptr;
}

// Record the result of resolving an invokevirtual / invokeinterface /
// invokespecial / invokestatic call site in the ResolvedMethodEntry at
// method_index.  vtable_index >= 0 selects vtable dispatch; a negative
// vtable_index (Method::nonvirtual_vtable_index) records a statically-bound
// ("direct") call.  The bytecode1/bytecode2 fields are written only after all
// other entry state, because they are what the interpreter tests to decide
// whether the entry is resolved.
void ConstantPoolCache::set_direct_or_vtable_call(Bytecodes::Code invoke_code,
                                                  int method_index,
                                                  const methodHandle& method,
                                                  int vtable_index,
                                                  bool sender_is_interface) {
  bool is_vtable_call = (vtable_index >= 0);  // FIXME: split this method on this boolean
  assert(method->interpreter_entry() != nullptr, "should have been set at this point");
  assert(!method->is_obsolete(), "attempt to write obsolete method to cpCache");

  int byte_no = -1;
  bool change_to_virtual = false;
  InstanceKlass* holder = nullptr;  // have to declare this outside the switch
  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);
  switch (invoke_code) {
    case Bytecodes::_invokeinterface:
      holder = method->method_holder();
      // check for private interface method invocations
      if (vtable_index == Method::nonvirtual_vtable_index && holder->is_interface() ) {
        assert(method->is_private(), "unexpected non-private method");
        assert(method->can_be_statically_bound(), "unexpected non-statically-bound method");

        // Private interface methods are statically bound: mark vfinal and
        // pin the target method directly in the entry.
        method_entry->set_flags((                             1      << ResolvedMethodEntry::is_vfinal_shift) |
                                ((method->is_final_method() ? 1 : 0) << ResolvedMethodEntry::is_final_shift));
        method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
        assert(method_entry->is_vfinal(), "flags must be set");
        method_entry->set_method(method());
        byte_no = 2;
        method_entry->set_klass(holder);
        break;
      }
      else {
        // We get here from InterpreterRuntime::resolve_invoke when an invokeinterface
        // instruction links to a non-interface method (in Object). This can happen when
        // an interface redeclares an Object method (like CharSequence declaring toString())
        // or when invokeinterface is used explicitly.
        // In that case, the method has no itable index and must be invoked as a virtual.
        // Set a flag to keep track of this corner case.
        assert(holder->is_interface() || holder == vmClasses::Object_klass(), "unexpected holder class");
        assert(method->is_public(), "Calling non-public method in Object with invokeinterface");
        change_to_virtual = true;

        // ...and fall through as if we were handling invokevirtual:
      }
    case Bytecodes::_invokevirtual:
      {
        if (!is_vtable_call) {
          // Statically-bound virtual call: record the method itself (vfinal).
          assert(method->can_be_statically_bound(), "");
          method_entry->set_flags((                             1      << ResolvedMethodEntry::is_vfinal_shift) |
                                  ((method->is_final_method() ? 1 : 0) << ResolvedMethodEntry::is_final_shift)  |
                                  ((change_to_virtual         ? 1 : 0) << ResolvedMethodEntry::is_forced_virtual_shift));
          method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
          assert(method_entry->is_vfinal(), "flags must be set");
          method_entry->set_method(method());
        } else {
          // True vtable dispatch: record the vtable index, not the method.
          assert(!method->can_be_statically_bound(), "");
          assert(vtable_index >= 0, "valid index");
          assert(!method->is_final_method(), "sanity");
          method_entry->set_flags((change_to_virtual ? 1 : 0) << ResolvedMethodEntry::is_forced_virtual_shift);
          method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
          assert(!method_entry->is_vfinal(), "flags must not be set");
          method_entry->set_table_index(vtable_index);
        }
        byte_no = 2;
        break;
      }

    case Bytecodes::_invokespecial:
    case Bytecodes::_invokestatic: {
      assert(!is_vtable_call, "");
      // Note: Read and preserve the value of the is_vfinal flag on any
      // invokevirtual bytecode shared with this constant pool cache entry.
      // It is cheap and safe to consult is_vfinal() at all times.
      // Once is_vfinal is set, it must stay that way, lest we get a dangling oop.
      bool vfinal = method_entry->is_vfinal();
      method_entry->set_flags(((method->is_final_method() ? 1 : 0) << ResolvedMethodEntry::is_final_shift));
      assert(vfinal == method_entry->is_vfinal(), "Vfinal flag must be preserved");
      method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
      method_entry->set_method(method());
      byte_no = 1;
      break;
    }
    default:
      ShouldNotReachHere();
      break;
  }

  // Note: byte_no also appears in TemplateTable::resolve.
  if (byte_no == 1) {
    assert(invoke_code != Bytecodes::_invokevirtual &&
           invoke_code != Bytecodes::_invokeinterface, "");
    bool do_resolve = true;
    // Don't mark invokespecial to method as resolved if sender is an interface. The receiver
    // has to be checked that it is a subclass of the current class every time this bytecode
    // is executed.
    if (invoke_code == Bytecodes::_invokespecial && sender_is_interface &&
        method->name() != vmSymbols::object_initializer_name()) {
      do_resolve = false;
    }
    if (invoke_code == Bytecodes::_invokestatic) {
      assert(method->method_holder()->is_initialized() ||
             method->method_holder()->is_init_thread(JavaThread::current()),
             "invalid class initialization state for invoke_static");

      if (!VM_Version::supports_fast_class_init_checks() && method->needs_clinit_barrier()) {
        // Don't mark invokestatic to method as resolved if the holder class has not yet completed
        // initialization. An invokestatic must only proceed if the class is initialized, but if
        // we resolve it before then that class initialization check is skipped.
        //
        // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
        // template interpreter supports fast class initialization check for
        // invokestatic which doesn't require call site re-resolution to
        // enforce class initialization barrier.
        do_resolve = false;
      }
    }
    if (do_resolve) {
      method_entry->set_bytecode1(invoke_code);
    }
  } else if (byte_no == 2) {
    if (change_to_virtual) {
      assert(invoke_code == Bytecodes::_invokeinterface, "");
      // NOTE: THIS IS A HACK - BE VERY CAREFUL!!!
      //
      // Workaround for the case where we encounter an invokeinterface, but we
      // should really have an _invokevirtual since the resolved method is a
      // virtual method in java.lang.Object. This is a corner case in the spec
      // but is presumably legal. javac does not generate this code.
      //
      // We do not set bytecode_1() to _invokeinterface, because that is the
      // bytecode # used by the interpreter to see if it is resolved. In this
      // case, the method gets reresolved with caller for each interface call
      // because the actual selected method may not be public.
      //
      // We set bytecode_2() to _invokevirtual.
      // See also interpreterRuntime.cpp. (8/25/2000)
      invoke_code = Bytecodes::_invokevirtual;
    } else {
      assert(invoke_code == Bytecodes::_invokevirtual ||
             (invoke_code == Bytecodes::_invokeinterface &&
              ((method->is_private() ||
                (method->is_final() && method->method_holder() == vmClasses::Object_klass())))),
             "unexpected invocation mode");
      if (invoke_code == Bytecodes::_invokeinterface &&
          (method->is_private() || method->is_final())) {
        // We set bytecode_1() to _invokeinterface, because that is the
        // bytecode # used by the interpreter to see if it is resolved.
        // We set bytecode_2() to _invokevirtual.
        method_entry->set_bytecode1(invoke_code);
      }
    }
    // set up for invokevirtual, even if linking for invokeinterface also:
    method_entry->set_bytecode2(invoke_code);
  } else {
    ShouldNotReachHere();
  }
}

// Record a statically-bound ("direct") call: delegates with the sentinel
// Method::nonvirtual_vtable_index so no vtable dispatch is set up.
void ConstantPoolCache::set_direct_call(Bytecodes::Code invoke_code, int method_index, const methodHandle& method,
                                        bool sender_is_interface) {
  int index = Method::nonvirtual_vtable_index;
  // index < 0; FIXME: inline and customize set_direct_or_vtable_call
  set_direct_or_vtable_call(invoke_code, method_index, method, index, sender_is_interface);
}

// Record a vtable-dispatched call through the given vtable index.
void ConstantPoolCache::set_vtable_call(Bytecodes::Code invoke_code, int method_index, const methodHandle& method, int index) {
  // either the method is a miranda or its holder should accept the given index
  assert(method->method_holder()->is_interface() || method->method_holder()->verify_vtable_index(index), "");
  // index >= 0; FIXME: inline and customize set_direct_or_vtable_call
  set_direct_or_vtable_call(invoke_code, method_index, method, index, false);
}

// Record an itable-dispatched invokeinterface call: stores the referenced
// (interface) klass and the resolved method, then marks the entry resolved
// by writing bytecode1 last.
void ConstantPoolCache::set_itable_call(Bytecodes::Code invoke_code,
                                        int method_index,
                                        Klass* referenced_klass,
                                        const methodHandle& method, int index) {
  assert(method->method_holder()->verify_itable_index(index), "");
  assert(invoke_code == Bytecodes::_invokeinterface, "");
  InstanceKlass* interf = method->method_holder();
  assert(interf->is_interface(), "must be an interface");
  assert(!method->is_final_method(), "interfaces do not have final methods; cannot link to one here");
  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);
  method_entry->set_klass(static_cast<InstanceKlass*>(referenced_klass));
  method_entry->set_method(method());
  method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
  method_entry->set_bytecode1(Bytecodes::_invokeinterface);
}

// Record the resolution of an invokehandle call site. Returns the (possibly
// already-resolved) ResolvedMethodEntry for method_index.
ResolvedMethodEntry* ConstantPoolCache::set_method_handle(int method_index, const CallInfo &call_info) {
  // NOTE: This method entry can be the subject of data races.
  // There are three words to update: flags, refs[appendix_index], method (in that order).
  // Writers must store all other values before method.
  // Readers must test the method first for non-null before reading other fields.
  // Competing writers must acquire exclusive access via a lock.
  // A losing writer waits on the lock until the winner writes the method and leaves
  // the lock, so that when the losing writer returns, he can use the linked
  // cache entry.
  // Lock fields to write
  Bytecodes::Code invoke_code = Bytecodes::_invokehandle;
  MutexLocker ml(constant_pool()->pool_holder()->init_monitor());
  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);

  // Another thread may have resolved the entry while we waited for the lock.
  if (method_entry->is_resolved(invoke_code)) {
    return method_entry;
  }

  Method* adapter = call_info.resolved_method();
  const Handle appendix = call_info.resolved_appendix();
  const bool has_appendix = appendix.not_null();

  // Write the flags.
  // MHs are always sig-poly and have a local signature.
  method_entry->fill_in((u1)as_TosState(adapter->result_type()), (u2)adapter->size_of_parameters());
  method_entry->set_flags(((has_appendix ? 1 : 0) << ResolvedMethodEntry::has_appendix_shift         ) |
                          (                   1   << ResolvedMethodEntry::has_local_signature_shift  ) |
                          (                   1   << ResolvedMethodEntry::is_final_shift             ));

  // Method handle invokes use both a method and a resolved references index.
  // refs[appendix_index], if not null, contains a value passed as a trailing argument to the adapter.
  // In the general case, this could be the call site's MethodType,
  // for use with java.lang.Invokers.checkExactType, or else a CallSite object.
  // method_entry->method() contains the adapter method which manages the actual call.
  // In the general case, this is a compiled LambdaForm.
  // (The Java code is free to optimize these calls by binding other
  // sorts of methods and appendices to call sites.)
  // JVM-level linking is via the method, as if for invokespecial, and signatures are erased.
  // The appendix argument (if any) is added to the signature, and is counted in the parameter_size bits.
  // Even with the appendix, the method will never take more than 255 parameter slots.
  //
  // This means that given a call site like (List)mh.invoke("foo"),
  // the method has signature '(Ljl/Object;Ljl/invoke/MethodType;)Ljl/Object;',
  // not '(Ljava/lang/String;)Ljava/util/List;'.
  // The fact that String and List are involved is encoded in the MethodType in refs[appendix_index].
  // This allows us to create fewer Methods, while keeping type safety.
  //

  // Store appendix, if any.
  if (has_appendix) {
    const int appendix_index = method_entry->resolved_references_index();
    objArrayOop resolved_references = constant_pool()->resolved_references();
    assert(appendix_index >= 0 && appendix_index < resolved_references->length(), "oob");
    assert(resolved_references->obj_at(appendix_index) == nullptr, "init just once");
    resolved_references->obj_at_put(appendix_index, appendix());
  }

  method_entry->set_method(adapter);  // This must be the last one to set (see NOTE above)!

  // The interpreter assembly code does not check byte_2,
  // but it is used by is_resolved, method_if_resolved, etc.
  method_entry->set_bytecode1(invoke_code);

  assert(has_appendix == method_entry->has_appendix(), "proper storage of appendix flag");
  assert(method_entry->has_local_signature(), "proper storage of signature flag");
  return method_entry;
}

// Return the resolved method for the entry at method_index, or nullptr if
// the entry has not been resolved (or a vtable entry's holder klass is not
// yet resolved in the constant pool).
Method* ConstantPoolCache::method_if_resolved(int method_index) const {
  // Decode the action of set_method and set_interface_call
  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);

  Bytecodes::Code invoke_code = (Bytecodes::Code)method_entry->bytecode1();
  switch (invoke_code) {
    case Bytecodes::_invokeinterface:
    case Bytecodes::_invokestatic:
    case Bytecodes::_invokespecial:
      assert(!method_entry->has_appendix(), "");
      // fall through
    case Bytecodes::_invokehandle:
      return method_entry->method();
    case Bytecodes::_invokedynamic:
      // invokedynamic entries live in _resolved_indy_entries, not here.
      ShouldNotReachHere();
    default:
      assert(invoke_code == (Bytecodes::Code)0, "unexpected bytecode");
      break;
  }

  // bytecode1 was unresolved; check the invokevirtual side of the entry.
  invoke_code = (Bytecodes::Code)method_entry->bytecode2();
  if (invoke_code == Bytecodes::_invokevirtual) {
    if (method_entry->is_vfinal()) {
      return method_entry->method();
    } else {
      int holder_index = constant_pool()->uncached_klass_ref_index_at(method_entry->constant_pool_index());
      if (constant_pool()->tag_at(holder_index).is_klass()) {
        Klass* klass = constant_pool()->resolved_klass_at(holder_index);
        return klass->method_at_vtable(method_entry->table_index());
      }
    }
  }
  return nullptr;
}

// Allocate a new ConstantPoolCache in loader_data's metaspace, copying the
// rewriter-built entry lists into permanent Array<>s (nullptr when empty).
ConstantPoolCache* ConstantPoolCache::allocate(ClassLoaderData* loader_data,
                                               const intStack& invokedynamic_map,
                                               const GrowableArray<ResolvedIndyEntry> indy_entries,
                                               const GrowableArray<ResolvedFieldEntry> field_entries,
                                               const GrowableArray<ResolvedMethodEntry> method_entries,
                                               TRAPS) {

  int size = ConstantPoolCache::size();

  // Initialize resolved entry arrays with available data
  Array<ResolvedFieldEntry>* resolved_field_entries = initialize_resolved_entries_array(loader_data, field_entries, CHECK_NULL);
  Array<ResolvedIndyEntry>* resolved_indy_entries = initialize_resolved_entries_array(loader_data, indy_entries, CHECK_NULL);
  Array<ResolvedMethodEntry>* resolved_method_entries = initialize_resolved_entries_array(loader_data, method_entries, CHECK_NULL);

  return new (loader_data, size, MetaspaceObj::ConstantPoolCacheType, THREAD)
    ConstantPoolCache(invokedynamic_map, resolved_indy_entries, resolved_field_entries, resolved_method_entries);
}

// Record the GC marking cycle when redefined vs. when found in the loom stack chunks.
void ConstantPoolCache::record_gc_epoch() {
  _gc_epoch = CodeCache::gc_epoch();
}

#if INCLUDE_CDS
// Strip runtime-only resolution state from every entry so the cache can be
// written into the CDS archive.
void ConstantPoolCache::remove_unshareable_info() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  // <this> is the copy to be written into the archive. It's in the ArchiveBuilder's "buffer space".
  // However, this->_initial_entries was not copied/relocated by the ArchiveBuilder, so it's
  // still pointing to the array allocated inside save_for_archive().
  if (_resolved_indy_entries != nullptr) {
    for (int i = 0; i < _resolved_indy_entries->length(); i++) {
      resolved_indy_entry_at(i)->remove_unshareable_info();
    }
  }
  if (_resolved_field_entries != nullptr) {
    for (int i = 0; i < _resolved_field_entries->length(); i++) {
      resolved_field_entry_at(i)->remove_unshareable_info();
    }
  }
  if (_resolved_method_entries != nullptr) {
    for (int i = 0; i < _resolved_method_entries->length(); i++) {
      resolved_method_entry_at(i)->remove_unshareable_info();
    }
  }
}
#endif // INCLUDE_CDS

// Release metadata owned by this cache back to the class loader data.
void ConstantPoolCache::deallocate_contents(ClassLoaderData* data) {
  assert(!is_shared(), "shared caches are not deallocated");
  data->remove_handle(_resolved_references);
  set_resolved_references(OopHandle());
  MetadataFactory::free_array<u2>(data, _reference_map);
  set_reference_map(nullptr);
#if INCLUDE_CDS
  // NOTE(review): the three entry arrays are freed only in CDS-enabled
  // builds — confirm that non-CDS builds reclaim them elsewhere.
  if (_resolved_indy_entries != nullptr) {
    MetadataFactory::free_array<ResolvedIndyEntry>(data, _resolved_indy_entries);
    _resolved_indy_entries = nullptr;
  }
  if (_resolved_field_entries != nullptr) {
    MetadataFactory::free_array<ResolvedFieldEntry>(data, _resolved_field_entries);
    _resolved_field_entries = nullptr;
  }
  if (_resolved_method_entries != nullptr) {
    MetadataFactory::free_array<ResolvedMethodEntry>(data, _resolved_method_entries);
    _resolved_method_entries = nullptr;
  }
#endif
}

#if INCLUDE_CDS_JAVA_HEAP
// Return the archived resolved-references array, or nullptr if none was
// recorded for this cache.
oop ConstantPoolCache::archived_references() {
  if (_archived_references_index < 0) {
    return nullptr;
  }
  return HeapShared::get_root(_archived_references_index);
}

// Drop the archived-references root and forget its index.
void ConstantPoolCache::clear_archived_references() {
  if (_archived_references_index >= 0) {
    HeapShared::clear_root(_archived_references_index);
    _archived_references_index = -1;
  }
}

// Remember the heap-archive root index of the resolved-references array.
void ConstantPoolCache::set_archived_references(int root_index) {
  assert(CDSConfig::is_dumping_heap(), "sanity");
  _archived_references_index = root_index;
}
#endif

#if INCLUDE_JVMTI
// Logging helper for RedefineClasses: prints the holder class name once
// (guarded by *trace_name_printed), then traces each updated entry.
void log_adjust(const char* entry_type, Method* old_method, Method* new_method, bool* trace_name_printed) {
  ResourceMark rm;

  if (!(*trace_name_printed)) {
    log_info(redefine, class, update)("adjust: name=%s", old_method->method_holder()->external_name());
    *trace_name_printed = true;
  }
  log_trace(redefine, class, update, constantpool)
    ("cpc %s entry update: %s", entry_type, new_method->external_name());
}

// RedefineClasses() API support:
// If any entry of this ConstantPoolCache points to any of
// old_methods, replace it with the corresponding new_method.
void ConstantPoolCache::adjust_method_entries(bool * trace_name_printed) {
  if (_resolved_indy_entries != nullptr) {
    for (int j = 0; j < _resolved_indy_entries->length(); j++) {
      Method* old_method = resolved_indy_entry_at(j)->method();
      if (old_method == nullptr || !old_method->is_old()) {
        continue;
      }
      Method* new_method = old_method->get_new_method();
      resolved_indy_entry_at(j)->adjust_method_entry(new_method);
      log_adjust("indy", old_method, new_method, trace_name_printed);
    }
  }
  if (_resolved_method_entries != nullptr) {
    for (int i = 0; i < _resolved_method_entries->length(); i++) {
      ResolvedMethodEntry* method_entry = resolved_method_entry_at(i);
      // get interesting method entry
      Method* old_method = method_entry->method();
      if (old_method == nullptr || !old_method->is_old()) {
        continue;  // skip uninteresting entries
      }
      if (old_method->is_deleted()) {
        // clean up entries with deleted methods
        method_entry->reset_entry();
        continue;
      }
      Method* new_method = old_method->get_new_method();
      method_entry->adjust_method_entry(new_method);
      log_adjust("non-indy", old_method, new_method, trace_name_printed);
    }
  }
}

// the constant pool cache should never contain old or obsolete methods
bool ConstantPoolCache::check_no_old_or_obsolete_entries() {
  ResourceMark rm;
  if (_resolved_indy_entries != nullptr) {
    for (int i = 0; i < _resolved_indy_entries->length(); i++) {
      Method* m = resolved_indy_entry_at(i)->method();
      if (m != nullptr && !resolved_indy_entry_at(i)->check_no_old_or_obsolete_entry()) {
        log_trace(redefine, class, update, constantpool)
          ("cpcache check found old method entry: class: %s, old: %d, obsolete: %d, method: %s",
           constant_pool()->pool_holder()->external_name(), m->is_old(), m->is_obsolete(), m->external_name());
        return false;
      }
    }
  }
  if (_resolved_method_entries != nullptr) {
    for (int i = 0; i < _resolved_method_entries->length(); i++) {
      ResolvedMethodEntry* method_entry = resolved_method_entry_at(i);
      Method* m = method_entry->method();
      if (m != nullptr && !method_entry->check_no_old_or_obsolete_entry()) {
        log_trace(redefine, class, update, constantpool)
          ("cpcache check found old method entry: class: %s, old: %d, obsolete: %d, method: %s",
           constant_pool()->pool_holder()->external_name(), m->is_old(), m->is_obsolete(), m->external_name());
        return false;
      }
    }
  }
  return true;
}

// Debugging aid: dump the whole cache to the tty.
void ConstantPoolCache::dump_cache() {
  print_on(tty);
}
#endif // INCLUDE_JVMTI

// Visit all metaspace pointers owned by this cache (used by CDS archiving).
// The entry arrays are pushed as writable because resolution mutates them.
void ConstantPoolCache::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(cds)("Iter(ConstantPoolCache): %p", this);
  it->push(&_constant_pool);
  it->push(&_reference_map);
  if (_resolved_indy_entries != nullptr) {
    it->push(&_resolved_indy_entries, MetaspaceClosure::_writable);
  }
  if (_resolved_field_entries != nullptr) {
    it->push(&_resolved_field_entries, MetaspaceClosure::_writable);
  }
  if (_resolved_method_entries != nullptr) {
    it->push(&_resolved_method_entries, MetaspaceClosure::_writable);
  }
}

// Record a LinkageError from invokedynamic resolution in the resolution-error
// table so later executions rethrow the same error. Returns true if this
// thread's failure was recorded; returns false (clearing the pending
// exception) when another thread already resolved the entry or recorded a
// failure first. (cpool_index and tag are currently unused here.)
bool ConstantPoolCache::save_and_throw_indy_exc(
  const constantPoolHandle& cpool, int cpool_index, int index, constantTag tag, TRAPS) {

  assert(HAS_PENDING_EXCEPTION, "No exception got thrown!");
  assert(PENDING_EXCEPTION->is_a(vmClasses::LinkageError_klass()),
         "No LinkageError exception");

  MutexLocker ml(THREAD, cpool->pool_holder()->init_monitor());

  // if the indy_info is resolved or the indy_resolution_failed flag is set then another
  // thread either succeeded in resolving the method or got a LinkageError
  // exception, before this thread was able to record its failure. So, clear
  // this thread's exception and return false so caller can use the earlier
  // thread's result.
  if (resolved_indy_entry_at(index)->is_resolved() || resolved_indy_entry_at(index)->resolution_failed()) {
    CLEAR_PENDING_EXCEPTION;
    return false;
  }

  Symbol* error = PENDING_EXCEPTION->klass()->name();
  Symbol* message = java_lang_Throwable::detail_message(PENDING_EXCEPTION);

  int encoded_index = ResolutionErrorTable::encode_indy_index(
                        ConstantPool::encode_invokedynamic_index(index));
  SystemDictionary::add_resolution_error(cpool, encoded_index, error, message);
  resolved_indy_entry_at(index)->set_resolution_failed();
  return true;
}

// Record the resolution of an invokedynamic call site and return its appendix
// (or nullptr). Under the holder's init monitor: if another thread already
// resolved the entry, return its resolved reference; if another thread
// recorded a resolution failure, rethrow that earlier error instead.
oop ConstantPoolCache::set_dynamic_call(const CallInfo &call_info, int index) {
  ResourceMark rm;
  MutexLocker ml(constant_pool()->pool_holder()->init_monitor());
  assert(index >= 0, "Indy index must be positive at this point");

  if (resolved_indy_entry_at(index)->method() != nullptr) {
    return constant_pool()->resolved_reference_from_indy(index);
  }

  if (resolved_indy_entry_at(index)->resolution_failed()) {
    // Before we got here, another thread got a LinkageError exception during
    // resolution. Ignore our success and throw their exception.
    guarantee(index >= 0, "Invalid indy index");
    int encoded_index = ResolutionErrorTable::encode_indy_index(
                          ConstantPool::encode_invokedynamic_index(index));
    JavaThread* THREAD = JavaThread::current();  // For exception macros.
    constantPoolHandle cp(THREAD, constant_pool());
    ConstantPool::throw_resolution_error(cp, encoded_index, THREAD);
    return nullptr;
  }

  Method* adapter = call_info.resolved_method();
  const Handle appendix = call_info.resolved_appendix();
  const bool has_appendix = appendix.not_null();

  LogStream* log_stream = nullptr;
  LogStreamHandle(Debug, methodhandles, indy) lsh_indy;
  if (lsh_indy.is_enabled()) {
    ResourceMark rm;
    log_stream = &lsh_indy;
    log_stream->print_cr("set_method_handle bc=%d appendix=" PTR_FORMAT "%s method=" PTR_FORMAT " (local signature) ",
                         0xba,
                         p2i(appendix()),
                         (has_appendix ? "" : " (unused)"),
                         p2i(adapter));
    adapter->print_on(log_stream);
    if (has_appendix)  appendix()->print_on(log_stream);
  }

  // Publish the appendix (if any) before filling in the entry.
  if (has_appendix) {
    const int appendix_index = resolved_indy_entry_at(index)->resolved_references_index();
    objArrayOop resolved_references = constant_pool()->resolved_references();
    assert(appendix_index >= 0 && appendix_index < resolved_references->length(), "oob");
    assert(resolved_references->obj_at(appendix_index) == nullptr, "init just once");
    resolved_references->obj_at_put(appendix_index, appendix());
  }

  // Populate entry with resolved information
  assert(resolved_indy_entries() != nullptr, "Invokedynamic array is empty, cannot fill with resolved information");
  resolved_indy_entry_at(index)->fill_in(adapter, adapter->size_of_parameters(), as_TosState(adapter->result_type()), has_appendix);

  if (log_stream != nullptr) {
    resolved_indy_entry_at(index)->print_on(log_stream);
  }
  return appendix();
}

// Convenience overload: look up the entry at method_index first.
oop ConstantPoolCache::appendix_if_resolved(int method_index) const {
  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);
  return appendix_if_resolved(method_entry);
}

// Return the appendix oop stored in resolved references for this entry,
// or nullptr when the entry has no appendix.
oop ConstantPoolCache::appendix_if_resolved(ResolvedMethodEntry* method_entry) const {
  if (!method_entry->has_appendix())
    return nullptr;
  const int ref_index = method_entry->resolved_references_index();
  return constant_pool()->resolved_reference_at(ref_index);
}

// Printing

void ConstantPoolCache::print_on(outputStream* st) const {
  st->print_cr("%s", internal_name());
  // print constant pool cache entries
  print_resolved_field_entries(st);
  print_resolved_method_entries(st);
  print_resolved_indy_entries(st);
}

// Print every resolved field entry.
void ConstantPoolCache::print_resolved_field_entries(outputStream* st) const {
  for (int field_index = 0; field_index < resolved_field_entries_length(); field_index++) {
    resolved_field_entry_at(field_index)->print_on(st);
  }
}

// Print every resolved method entry, including its appendix when present.
void ConstantPoolCache::print_resolved_method_entries(outputStream* st) const {
  for (int method_index = 0; method_index < resolved_method_entries_length(); method_index++) {
    ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);
    method_entry->print_on(st);
    if (method_entry->has_appendix()) {
      st->print("  appendix: ");
      constant_pool()->resolved_reference_from_method(method_index)->print_on(st);
    }
  }
}

// Print every resolved invokedynamic entry, including its appendix when present.
void ConstantPoolCache::print_resolved_indy_entries(outputStream* st) const {
  for (int indy_index = 0; indy_index < resolved_indy_entries_length(); indy_index++) {
    ResolvedIndyEntry* indy_entry = resolved_indy_entry_at(indy_index);
    indy_entry->print_on(st);
    if (indy_entry->has_appendix()) {
      st->print("  appendix: ");
      constant_pool()->resolved_reference_from_indy(indy_index)->print_on(st);
    }
  }
}