1 /* 2 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #include "cds/cdsConfig.hpp" 26 #include "cds/cppVtables.hpp" 27 #include "cds/metaspaceShared.hpp" 28 #include "classfile/classLoader.hpp" 29 #include "classfile/classLoaderDataGraph.hpp" 30 #include "classfile/metadataOnStackMark.hpp" 31 #include "classfile/symbolTable.hpp" 32 #include "classfile/systemDictionary.hpp" 33 #include "classfile/vmClasses.hpp" 34 #include "code/aotCodeCache.hpp" 35 #include "code/codeCache.hpp" 36 #include "code/debugInfoRec.hpp" 37 #include "compiler/compilationPolicy.hpp" 38 #include "gc/shared/collectedHeap.inline.hpp" 39 #include "interpreter/bytecodeStream.hpp" 40 #include "interpreter/bytecodeTracer.hpp" 41 #include "interpreter/bytecodes.hpp" 42 #include "interpreter/interpreter.hpp" 43 #include "interpreter/oopMapCache.hpp" 44 #include "logging/log.hpp" 45 #include "logging/logStream.hpp" 46 #include "logging/logTag.hpp" 47 #include "memory/allocation.inline.hpp" 48 #include "memory/metadataFactory.hpp" 49 #include "memory/metaspaceClosure.hpp" 50 #include "memory/oopFactory.hpp" 51 #include "memory/resourceArea.hpp" 52 #include "memory/universe.hpp" 53 #include "nmt/memTracker.hpp" 54 #include "oops/constMethod.hpp" 55 #include "oops/constantPool.hpp" 56 #include "oops/jmethodIDTable.hpp" 57 #include "oops/klass.inline.hpp" 58 #include "oops/method.inline.hpp" 59 #include "oops/methodData.hpp" 60 #include "oops/objArrayKlass.hpp" 61 #include "oops/objArrayOop.inline.hpp" 62 #include "oops/oop.inline.hpp" 63 #include "oops/symbol.hpp" 64 #include "oops/inlineKlass.inline.hpp" 65 #include "oops/trainingData.hpp" 66 #include "prims/jvmtiExport.hpp" 67 #include "prims/methodHandles.hpp" 68 #include "runtime/atomic.hpp" 69 #include "runtime/arguments.hpp" 70 #include "runtime/continuationEntry.hpp" 71 #include "runtime/frame.inline.hpp" 72 #include "runtime/handles.inline.hpp" 73 #include "runtime/init.hpp" 74 #include "runtime/java.hpp" 75 #include "runtime/orderAccess.hpp" 76 #include "runtime/perfData.hpp" 77 #include "runtime/relocator.hpp" 78 #include "runtime/safepointVerifiers.hpp" 79 #include "runtime/sharedRuntime.hpp" 80 #include "runtime/signature.hpp" 81 #include "runtime/threads.hpp" 82 #include "runtime/vm_version.hpp" 83 #include "utilities/align.hpp" 84 #include "utilities/quickSort.hpp" 85 #include "utilities/vmError.hpp" 86 #include "utilities/xmlstream.hpp" 87 88 // Implementation of Method 89 90 Method* Method::allocate(ClassLoaderData* loader_data, 91 
                         int byte_code_size,
                         AccessFlags access_flags,
                         InlineTableSizes* sizes,
                         ConstMethod::MethodType method_type,
                         Symbol* name,
                         TRAPS) {
  assert(!access_flags.is_native() || byte_code_size == 0,
         "native methods should not contain byte codes");
  ConstMethod* cm = ConstMethod::allocate(loader_data,
                                          byte_code_size,
                                          sizes,
                                          method_type,
                                          CHECK_NULL);
  int size = Method::size(access_flags.is_native());
  return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, name);
}

Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
  NoSafepointVerifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  set_intrinsic_id(vmIntrinsics::_none);
  clear_method_data();
  clear_method_counters();
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(nullptr); // sets i2i entry and from_int
  set_adapter_entry(nullptr);
  Method::clear_code(); // from_c/from_i get set to c2i/i2i

  if (access_flags.is_native()) {
    clear_native_function();
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)
  // Name is very useful for debugging.
  NOT_PRODUCT(_name = name;)
}

// Release Method*. The nmethod will be gone when we get here because
// we've walked the code cache.
void Method::deallocate_contents(ClassLoaderData* loader_data) {
  MetadataFactory::free_metadata(loader_data, constMethod());
  set_constMethod(nullptr);
  MetadataFactory::free_metadata(loader_data, method_data());
  clear_method_data();
  MetadataFactory::free_metadata(loader_data, method_counters());
  clear_method_counters();
  set_adapter_entry(nullptr);
  // The nmethod will be gone when we get here.
142 if (code() != nullptr) _code = nullptr; 143 } 144 145 void Method::release_C_heap_structures() { 146 if (method_data()) { 147 method_data()->release_C_heap_structures(); 148 149 // Destroy MethodData embedded lock 150 method_data()->~MethodData(); 151 } 152 } 153 154 address Method::get_i2c_entry() { 155 assert(adapter() != nullptr, "must have"); 156 return adapter()->get_i2c_entry(); 157 } 158 159 address Method::get_c2i_entry() { 160 assert(adapter() != nullptr, "must have"); 161 return adapter()->get_c2i_entry(); 162 } 163 164 address Method::get_c2i_inline_entry() { 165 assert(adapter() != nullptr, "must have"); 166 return adapter()->get_c2i_inline_entry(); 167 } 168 169 address Method::get_c2i_unverified_entry() { 170 assert(adapter() != nullptr, "must have"); 171 return adapter()->get_c2i_unverified_entry(); 172 } 173 174 address Method::get_c2i_unverified_inline_entry() { 175 assert(adapter() != nullptr, "must have"); 176 return adapter()->get_c2i_unverified_inline_entry(); 177 } 178 179 address Method::get_c2i_no_clinit_check_entry() { 180 assert(VM_Version::supports_fast_class_init_checks(), ""); 181 assert(adapter() != nullptr, "must have"); 182 return adapter()->get_c2i_no_clinit_check_entry(); 183 } 184 185 char* Method::name_and_sig_as_C_string() const { 186 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature()); 187 } 188 189 char* Method::name_and_sig_as_C_string(char* buf, int size) const { 190 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size); 191 } 192 193 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) { 194 const char* klass_name = klass->external_name(); 195 int klass_name_len = (int)strlen(klass_name); 196 int method_name_len = method_name->utf8_length(); 197 int len = klass_name_len + 1 + method_name_len + signature->utf8_length(); 198 char* dest = NEW_RESOURCE_ARRAY(char, len + 1); 199 strcpy(dest, klass_name); 200 dest[klass_name_len] = '.'; 201 strcpy(&dest[klass_name_len + 1], method_name->as_C_string()); 202 strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string()); 203 dest[len] = 0; 204 return dest; 205 } 206 207 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) { 208 Symbol* klass_name = klass->name(); 209 klass_name->as_klass_external_name(buf, size); 210 int len = (int)strlen(buf); 211 212 if (len < size - 1) { 213 buf[len++] = '.'; 214 215 method_name->as_C_string(&(buf[len]), size - len); 216 len = (int)strlen(buf); 217 218 signature->as_C_string(&(buf[len]), size - len); 219 } 220 221 return buf; 222 } 223 224 const char* Method::external_name() const { 225 return external_name(constants()->pool_holder(), name(), signature()); 226 } 227 228 void Method::print_external_name(outputStream *os) const { 229 print_external_name(os, constants()->pool_holder(), name(), signature()); 230 } 231 232 const char* Method::external_name(Klass* klass, Symbol* method_name, Symbol* signature) { 233 stringStream ss; 234 print_external_name(&ss, klass, method_name, signature); 235 return ss.as_string(); 236 } 237 238 void Method::print_external_name(outputStream *os, Klass* klass, Symbol* method_name, Symbol* signature) { 239 signature->print_as_signature_external_return_type(os); 240 os->print(" %s.%s(", klass->external_name(), method_name->as_C_string()); 241 signature->print_as_signature_external_parameters(os); 242 os->print(")"); 243 } 244 245 int 
Method::fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS) { 246 if (log_is_enabled(Debug, exceptions)) { 247 ResourceMark rm(THREAD); 248 log_debug(exceptions)("Looking for catch handler for exception of type \"%s\" in method \"%s\"", 249 ex_klass == nullptr ? "null" : ex_klass->external_name(), mh->name()->as_C_string()); 250 } 251 // exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index) 252 // access exception table 253 ExceptionTable table(mh()); 254 int length = table.length(); 255 // iterate through all entries sequentially 256 constantPoolHandle pool(THREAD, mh->constants()); 257 for (int i = 0; i < length; i ++) { 258 //reacquire the table in case a GC happened 259 ExceptionTable table(mh()); 260 int beg_bci = table.start_pc(i); 261 int end_bci = table.end_pc(i); 262 assert(beg_bci <= end_bci, "inconsistent exception table"); 263 log_debug(exceptions)(" - checking exception table entry for BCI %d to %d", 264 beg_bci, end_bci); 265 266 if (beg_bci <= throw_bci && throw_bci < end_bci) { 267 // exception handler bci range covers throw_bci => investigate further 268 log_debug(exceptions)(" - entry covers throw point BCI %d", throw_bci); 269 270 int handler_bci = table.handler_pc(i); 271 int klass_index = table.catch_type_index(i); 272 if (klass_index == 0) { 273 if (log_is_enabled(Info, exceptions)) { 274 ResourceMark rm(THREAD); 275 log_info(exceptions)("Found catch-all handler for exception of type \"%s\" in method \"%s\" at BCI: %d", 276 ex_klass == nullptr ? "null" : ex_klass->external_name(), mh->name()->as_C_string(), handler_bci); 277 } 278 return handler_bci; 279 } else if (ex_klass == nullptr) { 280 // Is this even possible? 281 if (log_is_enabled(Info, exceptions)) { 282 ResourceMark rm(THREAD); 283 log_info(exceptions)("null exception class is implicitly caught by handler in method \"%s\" at BCI: %d", 284 mh()->name()->as_C_string(), handler_bci); 285 } 286 return handler_bci; 287 } else { 288 if (log_is_enabled(Debug, exceptions)) { 289 ResourceMark rm(THREAD); 290 log_debug(exceptions)(" - resolving catch type \"%s\"", 291 pool->klass_name_at(klass_index)->as_C_string()); 292 } 293 // we know the exception class => get the constraint class 294 // this may require loading of the constraint class; if verification 295 // fails or some other exception occurs, return handler_bci 296 Klass* k = pool->klass_at(klass_index, THREAD); 297 if (HAS_PENDING_EXCEPTION) { 298 if (log_is_enabled(Debug, exceptions)) { 299 ResourceMark rm(THREAD); 300 log_debug(exceptions)(" - exception \"%s\" occurred resolving catch type", 301 PENDING_EXCEPTION->klass()->external_name()); 302 } 303 return handler_bci; 304 } 305 assert(k != nullptr, "klass not loaded"); 306 if (ex_klass->is_subtype_of(k)) { 307 if (log_is_enabled(Info, exceptions)) { 308 ResourceMark rm(THREAD); 309 log_info(exceptions)("Found matching handler for exception of type \"%s\" in method \"%s\" at BCI: %d", 310 ex_klass == nullptr ? 
"null" : ex_klass->external_name(), mh->name()->as_C_string(), handler_bci); 311 } 312 return handler_bci; 313 } 314 } 315 } 316 } 317 318 if (log_is_enabled(Debug, exceptions)) { 319 ResourceMark rm(THREAD); 320 log_debug(exceptions)("No catch handler found for exception of type \"%s\" in method \"%s\"", 321 ex_klass->external_name(), mh->name()->as_C_string()); 322 } 323 324 return -1; 325 } 326 327 void Method::mask_for(int bci, InterpreterOopMap* mask) { 328 methodHandle h_this(Thread::current(), this); 329 mask_for(h_this, bci, mask); 330 } 331 332 void Method::mask_for(const methodHandle& this_mh, int bci, InterpreterOopMap* mask) { 333 assert(this_mh() == this, "Sanity"); 334 method_holder()->mask_for(this_mh, bci, mask); 335 } 336 337 int Method::bci_from(address bcp) const { 338 if (is_native() && bcp == nullptr) { 339 return 0; 340 } 341 // Do not have a ResourceMark here because AsyncGetCallTrace stack walking code 342 // may call this after interrupting a nested ResourceMark. 343 assert((is_native() && bcp == code_base()) || contains(bcp) || VMError::is_error_reported(), 344 "bcp doesn't belong to this method. bcp: " PTR_FORMAT, p2i(bcp)); 345 346 return int(bcp - code_base()); 347 } 348 349 350 int Method::validate_bci(int bci) const { 351 // Called from the verifier, and should return -1 if not valid. 352 return ((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size())) ? bci : -1; 353 } 354 355 // Return bci if it appears to be a valid bcp 356 // Return -1 otherwise. 357 // Used by profiling code, when invalid data is a possibility. 358 // The caller is responsible for validating the Method* itself. 359 int Method::validate_bci_from_bcp(address bcp) const { 360 // keep bci as -1 if not a valid bci 361 int bci = -1; 362 if (bcp == nullptr || bcp == code_base()) { 363 // code_size() may return 0 and we allow 0 here 364 // the method may be native 365 bci = 0; 366 } else if (contains(bcp)) { 367 bci = int(bcp - code_base()); 368 } 369 // Assert that if we have dodged any asserts, bci is negative. 370 assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0"); 371 return bci; 372 } 373 374 address Method::bcp_from(int bci) const { 375 assert((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size()), 376 "illegal bci: %d for %s method", bci, is_native() ? "native" : "non-native"); 377 address bcp = code_base() + bci; 378 assert((is_native() && bcp == code_base()) || contains(bcp), "bcp doesn't belong to this method"); 379 return bcp; 380 } 381 382 address Method::bcp_from(address bcp) const { 383 if (is_native() && bcp == nullptr) { 384 return code_base(); 385 } else { 386 return bcp; 387 } 388 } 389 390 int Method::size(bool is_native) { 391 // If native, then include pointers for native_function and signature_handler 392 int extra_bytes = (is_native) ? 
                             2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}

Symbol* Method::klass_name() const {
  return method_holder()->name();
}

void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(aot)("Iter(Method): %p", this);

  if (!method_holder()->is_rewritten() || CDSConfig::is_valhalla_preview()) {
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}

#if INCLUDE_CDS
// Attempt to return method to original state. Clear any pointers
// (to objects outside the shared spaces). We won't be able to predict
// where they should point in a new JVM. Further initialize some
// entries now in order to allow them to be write protected later.

void Method::remove_unshareable_info() {
  unlink_method();
  if (method_data() != nullptr) {
    method_data()->remove_unshareable_info();
  }
  if (method_counters() != nullptr) {
    method_counters()->remove_unshareable_info();
  }
  if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
    _adapter->remove_unshareable_info();
    _adapter = nullptr;
  }
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}

void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  if (_adapter != nullptr) {
    assert(_adapter->is_linked(), "must be");
    _from_compiled_entry = _adapter->get_c2i_entry();
    _from_compiled_inline_entry = _adapter->get_c2i_inline_entry();
    _from_compiled_inline_ro_entry = _adapter->get_c2i_inline_ro_entry();
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
#endif

void Method::set_vtable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}

void Method::set_itable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_itable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context. The dumptime
    // itable index should be the same as the runtime index.
    assert(_vtable_index == itable_index_max - index,
           "archived itable index is different from runtime index");
    return; // don't write into the shared class
  } else {
    _vtable_index = itable_index_max - index;
  }
  assert(valid_itable_index(), "");
}

// The RegisterNatives call being attempted tried to register with a method that
// is not native. Ask JVM TI what prefixes have been specified. Then check
// to see if the native method is now wrapped with the prefixes. See the
// SetNativeMethodPrefix(es) functions in the JVM TI Spec for details.
static Method* find_prefixed_native(Klass* k, Symbol* name, Symbol* signature, TRAPS) {
#if INCLUDE_JVMTI
  ResourceMark rm(THREAD);
  Method* method;
  int name_len = name->utf8_length();
  char* name_str = name->as_utf8();
  int prefix_count;
  char** prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
  for (int i = 0; i < prefix_count; i++) {
    char* prefix = prefixes[i];
    int prefix_len = (int)strlen(prefix);

    // try adding this prefix to the method name and see if it matches another method name
    int trial_len = name_len + prefix_len;
    char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
    strcpy(trial_name_str, prefix);
    strcat(trial_name_str, name_str);
    TempNewSymbol trial_name = SymbolTable::probe(trial_name_str, trial_len);
    if (trial_name == nullptr) {
      continue; // no such symbol, so this prefix wasn't used, try the next prefix
    }
    method = k->lookup_method(trial_name, signature);
    if (method == nullptr) {
      continue; // signature doesn't match, try the next prefix
    }
    if (method->is_native()) {
      method->set_is_prefixed_native();
      return method; // wahoo, we found a prefixed version of the method, return it
    }
    // found as non-native, so prefix is good, add it, probably just need more prefixes
    name_len = trial_len;
    name_str = trial_name_str;
  }
#endif // INCLUDE_JVMTI
  return nullptr; // not found
}

bool Method::register_native(Klass* k, Symbol* name, Symbol* signature, address entry, TRAPS) {
  Method* method = k->lookup_method(name, signature);
  if (method == nullptr) {
    ResourceMark rm(THREAD);
    stringStream st;
    st.print("Method '");
    print_external_name(&st, k, name, signature);
    st.print("' name or signature does not match");
    THROW_MSG_(vmSymbols::java_lang_NoSuchMethodError(), st.as_string(), false);
  }
  if (!method->is_native()) {
    // trying to register to a non-native method, see if a JVM TI agent has added prefix(es)
    method = find_prefixed_native(k, name, signature, THREAD);
    if (method == nullptr) {
      ResourceMark rm(THREAD);
      stringStream st;
      st.print("Method '");
      print_external_name(&st, k, name, signature);
      st.print("' is not declared as native");
      THROW_MSG_(vmSymbols::java_lang_NoSuchMethodError(), st.as_string(), false);
    }
  }

  if (entry != nullptr) {
    method->set_native_function(entry, native_bind_event_is_interesting);
  } else {
    method->clear_native_function();
  }
  if (log_is_enabled(Debug, jni, resolve)) {
    ResourceMark rm(THREAD);
    log_debug(jni, resolve)("[Registering JNI native method %s.%s]",
                            method->method_holder()->external_name(),
                            method->name()->as_C_string());
  }
  return true;
}

bool Method::was_executed_more_than(int n) {
  // Invocation counter is reset when the Method* is compiled.
  // If the method has compiled code we therefore assume it has
  // been executed more than n times.
562 if (is_accessor() || is_empty_method() || (code() != nullptr)) { 563 // interpreter doesn't bump invocation counter of trivial methods 564 // compiler does not bump invocation counter of compiled methods 565 return true; 566 } 567 else if ((method_counters() != nullptr && 568 method_counters()->invocation_counter()->carry()) || 569 (method_data() != nullptr && 570 method_data()->invocation_counter()->carry())) { 571 // The carry bit is set when the counter overflows and causes 572 // a compilation to occur. We don't know how many times 573 // the counter has been reset, so we simply assume it has 574 // been executed more than n times. 575 return true; 576 } else { 577 return invocation_count() > n; 578 } 579 } 580 581 void Method::print_invocation_count(outputStream* st) { 582 //---< compose+print method return type, klass, name, and signature >--- 583 if (is_static()) { st->print("static "); } 584 if (is_final()) { st->print("final "); } 585 if (is_synchronized()) { st->print("synchronized "); } 586 if (is_native()) { st->print("native "); } 587 st->print("%s::", method_holder()->external_name()); 588 name()->print_symbol_on(st); 589 signature()->print_symbol_on(st); 590 591 if (WizardMode) { 592 // dump the size of the byte codes 593 st->print(" {%d}", code_size()); 594 } 595 st->cr(); 596 597 // Counting based on signed int counters tends to overflow with 598 // longer-running workloads on fast machines. The counters under 599 // consideration here, however, are limited in range by counting 600 // logic. See InvocationCounter:count_limit for example. 601 // No "overflow precautions" need to be implemented here. 602 st->print_cr (" interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count()); 603 st->print_cr (" invocation_counter: " INT32_FORMAT_W(11), invocation_count()); 604 st->print_cr (" backedge_counter: " INT32_FORMAT_W(11), backedge_count()); 605 606 if (method_data() != nullptr) { 607 st->print_cr (" decompile_count: " UINT32_FORMAT_W(11), method_data()->decompile_count()); 608 } 609 610 #ifndef PRODUCT 611 if (CountCompiledCalls) { 612 st->print_cr (" compiled_invocation_count: " INT64_FORMAT_W(11), compiled_invocation_count()); 613 } 614 #endif 615 } 616 617 MethodTrainingData* Method::training_data_or_null() const { 618 MethodCounters* mcs = method_counters(); 619 if (mcs == nullptr) { 620 return nullptr; 621 } else { 622 MethodTrainingData* mtd = mcs->method_training_data(); 623 if (mtd == mcs->method_training_data_sentinel()) { 624 return nullptr; 625 } 626 return mtd; 627 } 628 } 629 630 bool Method::init_training_data(MethodTrainingData* td) { 631 MethodCounters* mcs = method_counters(); 632 if (mcs == nullptr) { 633 return false; 634 } else { 635 return mcs->init_method_training_data(td); 636 } 637 } 638 639 bool Method::install_training_method_data(const methodHandle& method) { 640 MethodTrainingData* mtd = MethodTrainingData::find(method); 641 if (mtd != nullptr && mtd->final_profile() != nullptr) { 642 Atomic::replace_if_null(&method->_method_data, mtd->final_profile()); 643 return true; 644 } 645 return false; 646 } 647 648 // Build a MethodData* object to hold profiling information collected on this 649 // method when requested. 650 void Method::build_profiling_method_data(const methodHandle& method, TRAPS) { 651 if (install_training_method_data(method)) { 652 return; 653 } 654 // Do not profile the method if metaspace has hit an OOM previously 655 // allocating profiling data. 
Callers clear pending exception so don't 656 // add one here. 657 if (ClassLoaderDataGraph::has_metaspace_oom()) { 658 return; 659 } 660 661 ClassLoaderData* loader_data = method->method_holder()->class_loader_data(); 662 MethodData* method_data = MethodData::allocate(loader_data, method, THREAD); 663 if (HAS_PENDING_EXCEPTION) { 664 CompileBroker::log_metaspace_failure(); 665 ClassLoaderDataGraph::set_metaspace_oom(true); 666 return; // return the exception (which is cleared) 667 } 668 669 if (!Atomic::replace_if_null(&method->_method_data, method_data)) { 670 MetadataFactory::free_metadata(loader_data, method_data); 671 return; 672 } 673 674 if (PrintMethodData && (Verbose || WizardMode)) { 675 ResourceMark rm(THREAD); 676 tty->print("build_profiling_method_data for "); 677 method->print_name(tty); 678 tty->cr(); 679 // At the end of the run, the MDO, full of data, will be dumped. 680 } 681 } 682 683 MethodCounters* Method::build_method_counters(Thread* current, Method* m) { 684 // Do not profile the method if metaspace has hit an OOM previously 685 if (ClassLoaderDataGraph::has_metaspace_oom()) { 686 return nullptr; 687 } 688 689 methodHandle mh(current, m); 690 MethodCounters* counters; 691 if (current->is_Java_thread()) { 692 JavaThread* THREAD = JavaThread::cast(current); // For exception macros. 693 // Use the TRAPS version for a JavaThread so it will adjust the GC threshold 694 // if needed. 695 counters = MethodCounters::allocate_with_exception(mh, THREAD); 696 if (HAS_PENDING_EXCEPTION) { 697 CLEAR_PENDING_EXCEPTION; 698 } 699 } else { 700 // Call metaspace allocation that doesn't throw exception if the 701 // current thread isn't a JavaThread, ie. the VMThread. 702 counters = MethodCounters::allocate_no_exception(mh); 703 } 704 705 if (counters == nullptr) { 706 CompileBroker::log_metaspace_failure(); 707 ClassLoaderDataGraph::set_metaspace_oom(true); 708 return nullptr; 709 } 710 711 if (!mh->init_method_counters(counters)) { 712 MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters); 713 } 714 715 return mh->method_counters(); 716 } 717 718 bool Method::init_method_counters(MethodCounters* counters) { 719 // Try to install a pointer to MethodCounters, return true on success. 720 return Atomic::replace_if_null(&_method_counters, counters); 721 } 722 723 void Method::set_exception_handler_entered(int handler_bci) { 724 if (ProfileExceptionHandlers) { 725 MethodData* mdo = method_data(); 726 if (mdo != nullptr) { 727 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci); 728 handler_data.set_exception_handler_entered(); 729 } 730 } 731 } 732 733 int Method::extra_stack_words() { 734 // not an inline function, to avoid a header dependency on Interpreter 735 return extra_stack_entries() * Interpreter::stackElementSize; 736 } 737 738 // InlineKlass the method is declared to return. This must not 739 // safepoint as it is called with references live on the stack at 740 // locations the GC is unaware of. 
741 InlineKlass* Method::returns_inline_type() const { 742 assert(InlineTypeReturnedAsFields, "Inline types should never be returned as fields"); 743 if (is_native()) { 744 return nullptr; 745 } 746 NoSafepointVerifier nsv; 747 SignatureStream ss(signature()); 748 ss.skip_to_return_type(); 749 return ss.as_inline_klass(method_holder()); 750 } 751 752 bool Method::compute_has_loops_flag() { 753 BytecodeStream bcs(methodHandle(Thread::current(), this)); 754 Bytecodes::Code bc; 755 756 while ((bc = bcs.next()) >= 0) { 757 switch (bc) { 758 case Bytecodes::_ifeq: 759 case Bytecodes::_ifnull: 760 case Bytecodes::_iflt: 761 case Bytecodes::_ifle: 762 case Bytecodes::_ifne: 763 case Bytecodes::_ifnonnull: 764 case Bytecodes::_ifgt: 765 case Bytecodes::_ifge: 766 case Bytecodes::_if_icmpeq: 767 case Bytecodes::_if_icmpne: 768 case Bytecodes::_if_icmplt: 769 case Bytecodes::_if_icmpgt: 770 case Bytecodes::_if_icmple: 771 case Bytecodes::_if_icmpge: 772 case Bytecodes::_if_acmpeq: 773 case Bytecodes::_if_acmpne: 774 case Bytecodes::_goto: 775 case Bytecodes::_jsr: 776 if (bcs.dest() < bcs.next_bci()) { 777 return set_has_loops(); 778 } 779 break; 780 781 case Bytecodes::_goto_w: 782 case Bytecodes::_jsr_w: 783 if (bcs.dest_w() < bcs.next_bci()) { 784 return set_has_loops(); 785 } 786 break; 787 788 case Bytecodes::_lookupswitch: { 789 Bytecode_lookupswitch lookupswitch(this, bcs.bcp()); 790 if (lookupswitch.default_offset() < 0) { 791 return set_has_loops(); 792 } else { 793 for (int i = 0; i < lookupswitch.number_of_pairs(); ++i) { 794 LookupswitchPair pair = lookupswitch.pair_at(i); 795 if (pair.offset() < 0) { 796 return set_has_loops(); 797 } 798 } 799 } 800 break; 801 } 802 case Bytecodes::_tableswitch: { 803 Bytecode_tableswitch tableswitch(this, bcs.bcp()); 804 if (tableswitch.default_offset() < 0) { 805 return set_has_loops(); 806 } else { 807 for (int i = 0; i < tableswitch.length(); ++i) { 808 if (tableswitch.dest_offset_at(i) < 0) { 809 return set_has_loops(); 810 } 811 } 812 } 813 break; 814 } 815 default: 816 break; 817 } 818 } 819 820 _flags.set_has_loops_flag_init(true); 821 return false; 822 } 823 824 bool Method::is_final_method(AccessFlags class_access_flags) const { 825 // or "does_not_require_vtable_entry" 826 // default method or overpass can occur, is not final (reuses vtable entry) 827 // private methods in classes get vtable entries for backward class compatibility. 
828 if (is_overpass() || is_default_method()) return false; 829 return is_final() || class_access_flags.is_final(); 830 } 831 832 bool Method::is_final_method() const { 833 return is_final_method(method_holder()->access_flags()); 834 } 835 836 bool Method::is_default_method() const { 837 if (method_holder() != nullptr && 838 method_holder()->is_interface() && 839 !is_abstract() && !is_private()) { 840 return true; 841 } else { 842 return false; 843 } 844 } 845 846 bool Method::can_be_statically_bound(AccessFlags class_access_flags) const { 847 if (is_final_method(class_access_flags)) return true; 848 #ifdef ASSERT 849 bool is_nonv = (vtable_index() == nonvirtual_vtable_index); 850 if (class_access_flags.is_interface()) { 851 ResourceMark rm; 852 assert(is_nonv == is_static() || is_nonv == is_private(), 853 "nonvirtual unexpected for non-static, non-private: %s", 854 name_and_sig_as_C_string()); 855 } 856 #endif 857 assert(valid_vtable_index() || valid_itable_index(), "method must be linked before we ask this question"); 858 return vtable_index() == nonvirtual_vtable_index; 859 } 860 861 bool Method::can_be_statically_bound() const { 862 return can_be_statically_bound(method_holder()->access_flags()); 863 } 864 865 bool Method::can_be_statically_bound(InstanceKlass* context) const { 866 return (method_holder() == context) && can_be_statically_bound(); 867 } 868 869 /** 870 * Returns false if this is one of specially treated methods for 871 * which we have to provide stack trace in throw in compiled code. 872 * Returns true otherwise. 873 */ 874 bool Method::can_omit_stack_trace() { 875 if (klass_name() == vmSymbols::sun_invoke_util_ValueConversions()) { 876 return false; // All methods in sun.invoke.util.ValueConversions 877 } 878 return true; 879 } 880 881 bool Method::is_accessor() const { 882 return is_getter() || is_setter(); 883 } 884 885 bool Method::is_getter() const { 886 if (code_size() != 5) return false; 887 if (size_of_parameters() != 1) return false; 888 if (java_code_at(0) != Bytecodes::_aload_0) return false; 889 if (java_code_at(1) != Bytecodes::_getfield) return false; 890 switch (java_code_at(4)) { 891 case Bytecodes::_ireturn: 892 case Bytecodes::_lreturn: 893 case Bytecodes::_freturn: 894 case Bytecodes::_dreturn: 895 case Bytecodes::_areturn: 896 break; 897 default: 898 return false; 899 } 900 if (has_scalarized_return()) { 901 // Don't treat this as (trivial) getter method because the 902 // inline type should be returned in a scalarized form. 903 return false; 904 } 905 return true; 906 } 907 908 bool Method::is_setter() const { 909 if (code_size() != 6) return false; 910 if (java_code_at(0) != Bytecodes::_aload_0) return false; 911 switch (java_code_at(1)) { 912 case Bytecodes::_iload_1: 913 case Bytecodes::_aload_1: 914 case Bytecodes::_fload_1: 915 if (size_of_parameters() != 2) return false; 916 break; 917 case Bytecodes::_dload_1: 918 case Bytecodes::_lload_1: 919 if (size_of_parameters() != 3) return false; 920 break; 921 default: 922 return false; 923 } 924 if (java_code_at(2) != Bytecodes::_putfield) return false; 925 if (java_code_at(5) != Bytecodes::_return) return false; 926 if (has_scalarized_args()) { 927 // Don't treat this as (trivial) setter method because the 928 // inline type argument should be passed in a scalarized form. 
    return false;
  }
  return true;
}

bool Method::is_constant_getter() const {
  int last_index = code_size() - 1;
  // Check if the first 1-3 bytecodes are a constant push
  // and the last bytecode is a return.
  return (2 <= code_size() && code_size() <= 4 &&
          Bytecodes::is_const(java_code_at(0)) &&
          Bytecodes::length_for(java_code_at(0)) == last_index &&
          Bytecodes::is_return(java_code_at(last_index)) &&
          !has_scalarized_args());
}

bool Method::is_class_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static. Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return (name() == vmSymbols::class_initializer_name() &&
          (is_static() ||
           method_holder()->major_version() < 51));
}

// A method named <init> is a classic object constructor.
bool Method::is_object_constructor() const {
  return name() == vmSymbols::object_initializer_name();
}

bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}

bool Method::is_object_wait0() const {
  return klass_name() == vmSymbols::java_lang_Object()
         && name() == vmSymbols::wait_name();
}

objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
  int length = method->checked_exceptions_length();
  if (length == 0) { // common case
    return objArrayHandle(THREAD, Universe::the_empty_class_array());
  } else {
    methodHandle h_this(THREAD, method);
    objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
    objArrayHandle mirrors (THREAD, m_oop);
    for (int i = 0; i < length; i++) {
      CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
      Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
      if (log_is_enabled(Warning, exceptions) &&
          !k->is_subclass_of(vmClasses::Throwable_klass())) {
        ResourceMark rm(THREAD);
        log_warning(exceptions)(
          "Class %s in throws clause of method %s is not a subtype of class java.lang.Throwable",
          k->external_name(), method->external_name());
      }
      mirrors->obj_at_put(i, k->java_mirror());
    }
    return mirrors;
  }
};


int Method::line_number_from_bci(int bci) const {
  int best_bci = 0;
  int best_line = -1;
  if (bci == SynchronizationEntryBCI) bci = 0;
  if (0 <= bci && bci < code_size() && has_linenumber_table()) {
    // The line numbers are a short array of 2-tuples [start_pc, line_number].
    // Not necessarily sorted and not necessarily one-to-one.
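    // Scan every entry: an exact bci match returns immediately; otherwise keep the nearest entry below bci.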
1000 CompressedLineNumberReadStream stream(compressed_linenumber_table()); 1001 while (stream.read_pair()) { 1002 if (stream.bci() == bci) { 1003 // perfect match 1004 return stream.line(); 1005 } else { 1006 // update best_bci/line 1007 if (stream.bci() < bci && stream.bci() >= best_bci) { 1008 best_bci = stream.bci(); 1009 best_line = stream.line(); 1010 } 1011 } 1012 } 1013 } 1014 return best_line; 1015 } 1016 1017 1018 bool Method::is_klass_loaded_by_klass_index(int klass_index) const { 1019 if( constants()->tag_at(klass_index).is_unresolved_klass()) { 1020 Thread *thread = Thread::current(); 1021 Symbol* klass_name = constants()->klass_name_at(klass_index); 1022 Handle loader(thread, method_holder()->class_loader()); 1023 return SystemDictionary::find_instance_klass(thread, klass_name, loader) != nullptr; 1024 } else { 1025 return true; 1026 } 1027 } 1028 1029 1030 bool Method::is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved) const { 1031 int klass_index = constants()->klass_ref_index_at(refinfo_index, bc); 1032 if (must_be_resolved) { 1033 // Make sure klass is resolved in constantpool. 1034 if (constants()->tag_at(klass_index).is_unresolved_klass()) { 1035 return false; 1036 } 1037 } 1038 return is_klass_loaded_by_klass_index(klass_index); 1039 } 1040 1041 1042 void Method::set_native_function(address function, bool post_event_flag) { 1043 assert(function != nullptr, "use clear_native_function to unregister natives"); 1044 assert(!is_special_native_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), ""); 1045 address* native_function = native_function_addr(); 1046 1047 // We can see racers trying to place the same native function into place. Once 1048 // is plenty. 1049 address current = *native_function; 1050 if (current == function) return; 1051 if (post_event_flag && JvmtiExport::should_post_native_method_bind() && 1052 function != nullptr) { 1053 // native_method_throw_unsatisfied_link_error_entry() should only 1054 // be passed when post_event_flag is false. 1055 assert(function != 1056 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), 1057 "post_event_flag mismatch"); 1058 1059 // post the bind event, and possible change the bind function 1060 JvmtiExport::post_native_method_bind(this, &function); 1061 } 1062 *native_function = function; 1063 // This function can be called more than once. We must make sure that we always 1064 // use the latest registered method -> check if a stub already has been generated. 1065 // If so, we have to make it not_entrant. 1066 nmethod* nm = code(); // Put it into local variable to guard against concurrent updates 1067 if (nm != nullptr) { 1068 nm->make_not_entrant(nmethod::InvalidationReason::SET_NATIVE_FUNCTION); 1069 } 1070 } 1071 1072 1073 bool Method::has_native_function() const { 1074 if (is_special_native_intrinsic()) 1075 return false; // special-cased in SharedRuntime::generate_native_wrapper 1076 address func = native_function(); 1077 return (func != nullptr && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry()); 1078 } 1079 1080 1081 void Method::clear_native_function() { 1082 // Note: is_method_handle_intrinsic() is allowed here. 
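  // Reset the native entry to the UnsatisfiedLinkError thrower and unlink any compiled code for this method.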
1083 set_native_function( 1084 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), 1085 !native_bind_event_is_interesting); 1086 this->unlink_code(); 1087 } 1088 1089 1090 void Method::set_signature_handler(address handler) { 1091 address* signature_handler = signature_handler_addr(); 1092 *signature_handler = handler; 1093 } 1094 1095 1096 void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) { 1097 assert(reason != nullptr, "must provide a reason"); 1098 if (PrintCompilation && report) { 1099 ttyLocker ttyl; 1100 tty->print("made not %scompilable on ", is_osr ? "OSR " : ""); 1101 if (comp_level == CompLevel_all) { 1102 tty->print("all levels "); 1103 } else { 1104 tty->print("level %d ", comp_level); 1105 } 1106 this->print_short_name(tty); 1107 int size = this->code_size(); 1108 if (size > 0) { 1109 tty->print(" (%d bytes)", size); 1110 } 1111 if (reason != nullptr) { 1112 tty->print(" %s", reason); 1113 } 1114 tty->cr(); 1115 } 1116 if ((TraceDeoptimization || LogCompilation) && (xtty != nullptr)) { 1117 ttyLocker ttyl; 1118 xtty->begin_elem("make_not_compilable thread='%zu' osr='%d' level='%d'", 1119 os::current_thread_id(), is_osr, comp_level); 1120 if (reason != nullptr) { 1121 xtty->print(" reason=\'%s\'", reason); 1122 } 1123 xtty->method(this); 1124 xtty->stamp(); 1125 xtty->end_elem(); 1126 } 1127 } 1128 1129 bool Method::is_always_compilable() const { 1130 // Generated adapters must be compiled 1131 if (is_special_native_intrinsic() && is_synthetic()) { 1132 assert(!is_not_c1_compilable(), "sanity check"); 1133 assert(!is_not_c2_compilable(), "sanity check"); 1134 return true; 1135 } 1136 1137 return false; 1138 } 1139 1140 bool Method::is_not_compilable(int comp_level) const { 1141 if (number_of_breakpoints() > 0) 1142 return true; 1143 if (is_always_compilable()) 1144 return false; 1145 if (comp_level == CompLevel_any) 1146 return is_not_c1_compilable() && is_not_c2_compilable(); 1147 if (is_c1_compile(comp_level)) 1148 return is_not_c1_compilable(); 1149 if (is_c2_compile(comp_level)) 1150 return is_not_c2_compilable(); 1151 return false; 1152 } 1153 1154 // call this when compiler finds that this method is not compilable 1155 void Method::set_not_compilable(const char* reason, int comp_level, bool report) { 1156 if (is_always_compilable()) { 1157 // Don't mark a method which should be always compilable 1158 return; 1159 } 1160 print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason); 1161 if (comp_level == CompLevel_all) { 1162 set_is_not_c1_compilable(); 1163 set_is_not_c2_compilable(); 1164 } else { 1165 if (is_c1_compile(comp_level)) 1166 set_is_not_c1_compilable(); 1167 if (is_c2_compile(comp_level)) 1168 set_is_not_c2_compilable(); 1169 } 1170 assert(!CompilationPolicy::can_be_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check"); 1171 } 1172 1173 bool Method::is_not_osr_compilable(int comp_level) const { 1174 if (is_not_compilable(comp_level)) 1175 return true; 1176 if (comp_level == CompLevel_any) 1177 return is_not_c1_osr_compilable() && is_not_c2_osr_compilable(); 1178 if (is_c1_compile(comp_level)) 1179 return is_not_c1_osr_compilable(); 1180 if (is_c2_compile(comp_level)) 1181 return is_not_c2_osr_compilable(); 1182 return false; 1183 } 1184 1185 void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) { 1186 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason); 1187 if (comp_level == CompLevel_all) { 1188 
set_is_not_c1_osr_compilable(); 1189 set_is_not_c2_osr_compilable(); 1190 } else { 1191 if (is_c1_compile(comp_level)) 1192 set_is_not_c1_osr_compilable(); 1193 if (is_c2_compile(comp_level)) 1194 set_is_not_c2_osr_compilable(); 1195 } 1196 assert(!CompilationPolicy::can_be_osr_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check"); 1197 } 1198 1199 // Revert to using the interpreter and clear out the nmethod 1200 void Method::clear_code() { 1201 // this may be null if c2i adapters have not been made yet 1202 // Only should happen at allocate time. 1203 if (adapter() == nullptr) { 1204 _from_compiled_entry = nullptr; 1205 _from_compiled_inline_entry = nullptr; 1206 _from_compiled_inline_ro_entry = nullptr; 1207 } else { 1208 _from_compiled_entry = adapter()->get_c2i_entry(); 1209 _from_compiled_inline_entry = adapter()->get_c2i_inline_entry(); 1210 _from_compiled_inline_ro_entry = adapter()->get_c2i_inline_ro_entry(); 1211 } 1212 OrderAccess::storestore(); 1213 _from_interpreted_entry = _i2i_entry; 1214 OrderAccess::storestore(); 1215 _code = nullptr; 1216 } 1217 1218 void Method::unlink_code(nmethod *compare) { 1219 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag); 1220 // We need to check if either the _code or _from_compiled_code_entry_point 1221 // refer to this nmethod because there is a race in setting these two fields 1222 // in Method* as seen in bugid 4947125. 1223 if (code() == compare || 1224 from_compiled_entry() == compare->verified_entry_point()) { 1225 clear_code(); 1226 } 1227 } 1228 1229 void Method::unlink_code() { 1230 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag); 1231 clear_code(); 1232 } 1233 1234 #if INCLUDE_CDS 1235 // Called by class data sharing to remove any entry points (which are not shared) 1236 void Method::unlink_method() { 1237 assert(CDSConfig::is_dumping_archive(), "sanity"); 1238 _code = nullptr; 1239 if (!CDSConfig::is_dumping_adapters() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) { 1240 _adapter = nullptr; 1241 } 1242 _i2i_entry = nullptr; 1243 _from_compiled_entry = nullptr; 1244 _from_compiled_inline_entry = nullptr; 1245 _from_compiled_inline_ro_entry = nullptr; 1246 _from_interpreted_entry = nullptr; 1247 1248 if (is_native()) { 1249 *native_function_addr() = nullptr; 1250 set_signature_handler(nullptr); 1251 } 1252 NOT_PRODUCT(set_compiled_invocation_count(0);) 1253 1254 clear_method_data(); 1255 clear_method_counters(); 1256 clear_is_not_c1_compilable(); 1257 clear_is_not_c1_osr_compilable(); 1258 clear_is_not_c2_compilable(); 1259 clear_is_not_c2_osr_compilable(); 1260 clear_queued_for_compilation(); 1261 1262 remove_unshareable_flags(); 1263 } 1264 1265 void Method::remove_unshareable_flags() { 1266 // clear all the flags that shouldn't be in the archived version 1267 assert(!is_old(), "must be"); 1268 assert(!is_obsolete(), "must be"); 1269 assert(!is_deleted(), "must be"); 1270 1271 set_is_prefixed_native(false); 1272 set_queued_for_compilation(false); 1273 set_is_not_c2_compilable(false); 1274 set_is_not_c1_compilable(false); 1275 set_is_not_c2_osr_compilable(false); 1276 set_on_stack_flag(false); 1277 set_has_scalarized_args(false); 1278 set_has_scalarized_return(false); 1279 } 1280 #endif 1281 1282 // Called when the method_holder is getting linked. Setup entrypoints so the method 1283 // is ready to be called from interpreter, compiler, and vtables. 
1284 void Method::link_method(const methodHandle& h_method, TRAPS) { 1285 if (log_is_enabled(Info, perf, class, link)) { 1286 ClassLoader::perf_ik_link_methods_count()->inc(); 1287 } 1288 1289 // If the code cache is full, we may reenter this function for the 1290 // leftover methods that weren't linked. 1291 if (adapter() != nullptr) { 1292 if (adapter()->is_shared()) { 1293 assert(adapter()->is_linked(), "Adapter is shared but not linked"); 1294 } else { 1295 return; 1296 } 1297 } 1298 assert( _code == nullptr, "nothing compiled yet" ); 1299 1300 // Setup interpreter entrypoint 1301 assert(this == h_method(), "wrong h_method()" ); 1302 1303 assert(adapter() == nullptr || adapter()->is_linked(), "init'd to null or restored from cache"); 1304 address entry = Interpreter::entry_for_method(h_method); 1305 assert(entry != nullptr, "interpreter entry must be non-null"); 1306 // Sets both _i2i_entry and _from_interpreted_entry 1307 set_interpreter_entry(entry); 1308 1309 // Don't overwrite already registered native entries. 1310 if (is_native() && !has_native_function()) { 1311 set_native_function( 1312 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), 1313 !native_bind_event_is_interesting); 1314 } 1315 if (InlineTypeReturnedAsFields && returns_inline_type() && !has_scalarized_return()) { 1316 set_has_scalarized_return(); 1317 } 1318 1319 // Setup compiler entrypoint. This is made eagerly, so we do not need 1320 // special handling of vtables. An alternative is to make adapters more 1321 // lazily by calling make_adapter() from from_compiled_entry() for the 1322 // normal calls. For vtable calls life gets more complicated. When a 1323 // call-site goes mega-morphic we need adapters in all methods which can be 1324 // called from the vtable. We need adapters on such methods that get loaded 1325 // later. Ditto for mega-morphic itable calls. If this proves to be a 1326 // problem we'll make these lazily later. 1327 if (_adapter == nullptr) { 1328 (void) make_adapters(h_method, CHECK); 1329 assert(adapter()->is_linked(), "Adapter must have been linked"); 1330 } 1331 1332 // ONLY USE the h_method now as make_adapter may have blocked 1333 1334 if (h_method->is_continuation_native_intrinsic()) { 1335 _from_interpreted_entry = nullptr; 1336 _from_compiled_entry = nullptr; 1337 _i2i_entry = nullptr; 1338 if (Continuations::enabled()) { 1339 assert(!Threads::is_vm_complete(), "should only be called during vm init"); 1340 AdapterHandlerLibrary::create_native_wrapper(h_method); 1341 if (!h_method->has_compiled_code()) { 1342 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small"); 1343 } 1344 assert(_from_interpreted_entry == get_i2c_entry(), "invariant"); 1345 } 1346 } 1347 } 1348 1349 address Method::make_adapters(const methodHandle& mh, TRAPS) { 1350 PerfTraceTime timer(ClassLoader::perf_method_adapters_time()); 1351 1352 // Adapters for compiled code are made eagerly here. They are fairly 1353 // small (generally < 100 bytes) and quick to make (and cached and shared) 1354 // so making them eagerly shouldn't be too expensive. 1355 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh); 1356 if (adapter == nullptr ) { 1357 if (!is_init_completed()) { 1358 // Don't throw exceptions during VM initialization because java.lang.* classes 1359 // might not have been initialized, causing problems when constructing the 1360 // Java exception object. 
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  mh->_from_compiled_inline_entry = adapter->get_c2i_inline_entry();
  mh->_from_compiled_inline_ro_entry = adapter->get_c2i_inline_ro_entry();
  return adapter->get_c2i_entry();
}

// The verified_code_entry() must be called when an invoke is resolved
// on this method.

// It returns the compiled code entry point, after asserting not null.
// This function is called after potential safepoints so that nmethod
// or adapter that it points to is still live and valid.
// This function must not hit a safepoint!
address Method::verified_code_entry() {
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_entry != nullptr, "must be set");
  return _from_compiled_entry;
}

address Method::verified_inline_code_entry() {
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_inline_entry != nullptr, "must be set");
  return _from_compiled_inline_entry;
}

address Method::verified_inline_ro_code_entry() {
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_inline_ro_entry != nullptr, "must be set");
  return _from_compiled_inline_ro_entry;
}

// Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
// (could be racing a deopt).
// Not inline to avoid circular ref.
bool Method::check_code() const {
  // cached in a register or local. There's a race on the value of the field.
  nmethod *code = Atomic::load_acquire(&_code);
  return code == nullptr || (code->method() == nullptr) || (code->method() == (Method*)this && !code->is_osr_method());
}

// Install compiled code. Instantly it can execute.
void Method::set_code(const methodHandle& mh, nmethod *code) {
  assert_lock_strong(NMethodState_lock);
  assert( code, "use clear_code to remove code" );
  assert( mh->check_code(), "" );

  guarantee(mh->adapter() != nullptr, "Adapter blob must already exist!");

  // These writes must happen in this order, because the interpreter will
  // directly jump to from_interpreted_entry which jumps to an i2c adapter
  // which jumps to _from_compiled_entry.
  mh->_code = code; // Assign before allowing compiled code to exec

  int comp_level = code->comp_level();
  // In theory there could be a race here. In practice it is unlikely
  // and not worth worrying about.
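  // Record the highest compilation level ever installed for this method.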
1424 if (comp_level > mh->highest_comp_level()) { 1425 mh->set_highest_comp_level(comp_level); 1426 } 1427 1428 OrderAccess::storestore(); 1429 mh->_from_compiled_entry = code->verified_entry_point(); 1430 mh->_from_compiled_inline_entry = code->verified_inline_entry_point(); 1431 mh->_from_compiled_inline_ro_entry = code->verified_inline_ro_entry_point(); 1432 OrderAccess::storestore(); 1433 1434 if (mh->is_continuation_native_intrinsic()) { 1435 assert(mh->_from_interpreted_entry == nullptr, "initialized incorrectly"); // see link_method 1436 1437 if (mh->is_continuation_enter_intrinsic()) { 1438 // This is the entry used when we're in interpreter-only mode; see InterpreterMacroAssembler::jump_from_interpreted 1439 mh->_i2i_entry = ContinuationEntry::interpreted_entry(); 1440 } else if (mh->is_continuation_yield_intrinsic()) { 1441 mh->_i2i_entry = mh->get_i2c_entry(); 1442 } else { 1443 guarantee(false, "Unknown Continuation native intrinsic"); 1444 } 1445 // This must come last, as it is what's tested in LinkResolver::resolve_static_call 1446 Atomic::release_store(&mh->_from_interpreted_entry , mh->get_i2c_entry()); 1447 } else if (!mh->is_method_handle_intrinsic()) { 1448 // Instantly compiled code can execute. 1449 mh->_from_interpreted_entry = mh->get_i2c_entry(); 1450 } 1451 } 1452 1453 1454 bool Method::is_overridden_in(Klass* k) const { 1455 InstanceKlass* ik = InstanceKlass::cast(k); 1456 1457 if (ik->is_interface()) return false; 1458 1459 // If method is an interface, we skip it - except if it 1460 // is a miranda method 1461 if (method_holder()->is_interface()) { 1462 // Check that method is not a miranda method 1463 if (ik->lookup_method(name(), signature()) == nullptr) { 1464 // No implementation exist - so miranda method 1465 return false; 1466 } 1467 return true; 1468 } 1469 1470 assert(ik->is_subclass_of(method_holder()), "should be subklass"); 1471 if (!has_vtable_index()) { 1472 return false; 1473 } else { 1474 Method* vt_m = ik->method_at_vtable(vtable_index()); 1475 return vt_m != this; 1476 } 1477 } 1478 1479 1480 // give advice about whether this Method* should be cached or not 1481 bool Method::should_not_be_cached() const { 1482 if (is_old()) { 1483 // This method has been redefined. It is either EMCP or obsolete 1484 // and we don't want to cache it because that would pin the method 1485 // down and prevent it from being collectible if and when it 1486 // finishes executing. 1487 return true; 1488 } 1489 1490 // caching this method should be just fine 1491 return false; 1492 } 1493 1494 1495 /** 1496 * Returns true if this is one of the specially treated methods for 1497 * security related stack walks (like Reflection.getCallerClass). 1498 */ 1499 bool Method::is_ignored_by_security_stack_walk() const { 1500 if (intrinsic_id() == vmIntrinsics::_invoke) { 1501 // This is Method.invoke() -- ignore it 1502 return true; 1503 } 1504 if (method_holder()->is_subclass_of(vmClasses::reflect_MethodAccessorImpl_klass())) { 1505 // This is an auxiliary frame -- ignore it 1506 return true; 1507 } 1508 if (is_method_handle_intrinsic() || is_compiled_lambda_form()) { 1509 // This is an internal adapter frame for method handles -- ignore it 1510 return true; 1511 } 1512 return false; 1513 } 1514 1515 1516 // Constant pool structure for invoke methods: 1517 enum { 1518 _imcp_invoke_name = 1, // utf8: 'invokeExact', etc. 1519 _imcp_invoke_signature, // utf8: (variable Symbol*) 1520 _imcp_limit 1521 }; 1522 1523 // Test if this method is an MH adapter frame generated by Java code. 
1524 // Cf. java/lang/invoke/InvokerBytecodeGenerator 1525 bool Method::is_compiled_lambda_form() const { 1526 return intrinsic_id() == vmIntrinsics::_compiledLambdaForm; 1527 } 1528 1529 // Test if this method is an internal MH primitive method. 1530 bool Method::is_method_handle_intrinsic() const { 1531 vmIntrinsics::ID iid = intrinsic_id(); 1532 return (MethodHandles::is_signature_polymorphic(iid) && 1533 MethodHandles::is_signature_polymorphic_intrinsic(iid)); 1534 } 1535 1536 bool Method::has_member_arg() const { 1537 vmIntrinsics::ID iid = intrinsic_id(); 1538 return (MethodHandles::is_signature_polymorphic(iid) && 1539 MethodHandles::has_member_arg(iid)); 1540 } 1541 1542 // Make an instance of a signature-polymorphic internal MH primitive. 1543 methodHandle Method::make_method_handle_intrinsic(vmIntrinsics::ID iid, 1544 Symbol* signature, 1545 TRAPS) { 1546 ResourceMark rm(THREAD); 1547 methodHandle empty; 1548 1549 InstanceKlass* holder = vmClasses::MethodHandle_klass(); 1550 Symbol* name = MethodHandles::signature_polymorphic_intrinsic_name(iid); 1551 assert(iid == MethodHandles::signature_polymorphic_name_id(name), ""); 1552 1553 log_info(methodhandles)("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string()); 1554 1555 // invariant: cp->symbol_at_put is preceded by a refcount increment (more usually a lookup) 1556 name->increment_refcount(); 1557 signature->increment_refcount(); 1558 1559 int cp_length = _imcp_limit; 1560 ClassLoaderData* loader_data = holder->class_loader_data(); 1561 constantPoolHandle cp; 1562 { 1563 ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty)); 1564 cp = constantPoolHandle(THREAD, cp_oop); 1565 } 1566 cp->copy_fields(holder->constants()); 1567 cp->set_pool_holder(holder); 1568 cp->symbol_at_put(_imcp_invoke_name, name); 1569 cp->symbol_at_put(_imcp_invoke_signature, signature); 1570 cp->set_has_preresolution(); 1571 cp->set_is_for_method_handle_intrinsic(); 1572 1573 // decide on access bits: public or not? 1574 u2 flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL); 1575 bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid); 1576 if (must_be_static) flags_bits |= JVM_ACC_STATIC; 1577 assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods"); 1578 1579 methodHandle m; 1580 { 1581 InlineTableSizes sizes; 1582 Method* m_oop = Method::allocate(loader_data, 0, 1583 accessFlags_from(flags_bits), &sizes, 1584 ConstMethod::NORMAL, 1585 name, 1586 CHECK_(empty)); 1587 m = methodHandle(THREAD, m_oop); 1588 } 1589 m->set_constants(cp()); 1590 m->set_name_index(_imcp_invoke_name); 1591 m->set_signature_index(_imcp_invoke_signature); 1592 assert(MethodHandles::is_signature_polymorphic_name(m->name()), ""); 1593 assert(m->signature() == signature, ""); 1594 m->constMethod()->compute_from_signature(signature, must_be_static); 1595 m->init_intrinsic_id(klass_id_for_intrinsics(m->method_holder())); 1596 assert(m->is_method_handle_intrinsic(), ""); 1597 #ifdef ASSERT 1598 if (!MethodHandles::is_signature_polymorphic(m->intrinsic_id())) m->print(); 1599 assert(MethodHandles::is_signature_polymorphic(m->intrinsic_id()), "must be an invoker"); 1600 assert(m->intrinsic_id() == iid, "correctly predicted iid"); 1601 #endif //ASSERT 1602 1603 // Finally, set up its entry points. 
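  // Signature-polymorphic intrinsics are statically bound, so install the nonvirtual vtable index before linking.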
1604 assert(m->can_be_statically_bound(), ""); 1605 m->set_vtable_index(Method::nonvirtual_vtable_index); 1606 m->link_method(m, CHECK_(empty)); 1607 1608 if (iid == vmIntrinsics::_linkToNative) { 1609 m->set_interpreter_entry(m->adapter()->get_i2c_entry()); 1610 } 1611 if (log_is_enabled(Debug, methodhandles)) { 1612 LogTarget(Debug, methodhandles) lt; 1613 LogStream ls(lt); 1614 m->print_on(&ls); 1615 } 1616 1617 return m; 1618 } 1619 1620 #if INCLUDE_CDS 1621 void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) { 1622 if (m->adapter() != nullptr) { 1623 m->set_from_compiled_entry(m->adapter()->get_c2i_entry()); 1624 m->set_from_compiled_inline_entry(m->adapter()->get_c2i_inline_entry()); 1625 m->set_from_compiled_inline_ro_entry(m->adapter()->get_c2i_inline_ro_entry()); 1626 } 1627 m->link_method(m, CHECK); 1628 1629 if (m->intrinsic_id() == vmIntrinsics::_linkToNative) { 1630 m->set_interpreter_entry(m->adapter()->get_i2c_entry()); 1631 } 1632 } 1633 #endif 1634 1635 Klass* Method::check_non_bcp_klass(Klass* klass) { 1636 if (klass != nullptr && klass->class_loader() != nullptr) { 1637 if (klass->is_objArray_klass()) 1638 klass = ObjArrayKlass::cast(klass)->bottom_klass(); 1639 return klass; 1640 } 1641 return nullptr; 1642 } 1643 1644 1645 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length, 1646 u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) { 1647 // Code below does not work for native methods - they should never get rewritten anyway 1648 assert(!m->is_native(), "cannot rewrite native methods"); 1649 // Allocate new Method* 1650 AccessFlags flags = m->access_flags(); 1651 1652 ConstMethod* cm = m->constMethod(); 1653 int checked_exceptions_len = cm->checked_exceptions_length(); 1654 int localvariable_len = cm->localvariable_table_length(); 1655 int exception_table_len = cm->exception_table_length(); 1656 int method_parameters_len = cm->method_parameters_length(); 1657 int method_annotations_len = cm->method_annotations_length(); 1658 int parameter_annotations_len = cm->parameter_annotations_length(); 1659 int type_annotations_len = cm->type_annotations_length(); 1660 int default_annotations_len = cm->default_annotations_length(); 1661 1662 InlineTableSizes sizes( 1663 localvariable_len, 1664 new_compressed_linenumber_size, 1665 exception_table_len, 1666 checked_exceptions_len, 1667 method_parameters_len, 1668 cm->generic_signature_index(), 1669 method_annotations_len, 1670 parameter_annotations_len, 1671 type_annotations_len, 1672 default_annotations_len, 1673 0); 1674 1675 ClassLoaderData* loader_data = m->method_holder()->class_loader_data(); 1676 Method* newm_oop = Method::allocate(loader_data, 1677 new_code_length, 1678 flags, 1679 &sizes, 1680 m->method_type(), 1681 m->name(), 1682 CHECK_(methodHandle())); 1683 methodHandle newm (THREAD, newm_oop); 1684 1685 // Create a shallow copy of Method part, but be careful to preserve the new ConstMethod* 1686 ConstMethod* newcm = newm->constMethod(); 1687 int new_const_method_size = newm->constMethod()->size(); 1688 1689 // This works because the source and target are both Methods. Some compilers 1690 // (e.g., clang) complain that the target vtable pointer will be stomped, 1691 // so cast away newm()'s and m()'s Methodness. 1692 memcpy((void*)newm(), (void*)m(), sizeof(Method)); 1693 1694 // Create shallow copy of ConstMethod. 
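  // Note that this memcpy only covers the fixed-size ConstMethod header; the
  // variable-sized embedded tables (bytecodes, line numbers, exception tables,
  // local variables, etc.) are copied individually below, and the stackmap data
  // and annotations get freshly allocated copies.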
1695 memcpy(newcm, m->constMethod(), sizeof(ConstMethod)); 1696 1697 // Reset correct method/const method, method size, and parameter info 1698 newm->set_constMethod(newcm); 1699 newm->constMethod()->set_code_size(new_code_length); 1700 newm->constMethod()->set_constMethod_size(new_const_method_size); 1701 assert(newm->code_size() == new_code_length, "check"); 1702 assert(newm->method_parameters_length() == method_parameters_len, "check"); 1703 assert(newm->checked_exceptions_length() == checked_exceptions_len, "check"); 1704 assert(newm->exception_table_length() == exception_table_len, "check"); 1705 assert(newm->localvariable_table_length() == localvariable_len, "check"); 1706 // Copy new byte codes 1707 memcpy(newm->code_base(), new_code, new_code_length); 1708 // Copy line number table 1709 if (new_compressed_linenumber_size > 0) { 1710 memcpy(newm->compressed_linenumber_table(), 1711 new_compressed_linenumber_table, 1712 new_compressed_linenumber_size); 1713 } 1714 // Copy method_parameters 1715 if (method_parameters_len > 0) { 1716 memcpy(newm->method_parameters_start(), 1717 m->method_parameters_start(), 1718 method_parameters_len * sizeof(MethodParametersElement)); 1719 } 1720 // Copy checked_exceptions 1721 if (checked_exceptions_len > 0) { 1722 memcpy(newm->checked_exceptions_start(), 1723 m->checked_exceptions_start(), 1724 checked_exceptions_len * sizeof(CheckedExceptionElement)); 1725 } 1726 // Copy exception table 1727 if (exception_table_len > 0) { 1728 memcpy(newm->exception_table_start(), 1729 m->exception_table_start(), 1730 exception_table_len * sizeof(ExceptionTableElement)); 1731 } 1732 // Copy local variable number table 1733 if (localvariable_len > 0) { 1734 memcpy(newm->localvariable_table_start(), 1735 m->localvariable_table_start(), 1736 localvariable_len * sizeof(LocalVariableTableElement)); 1737 } 1738 // Copy stackmap table 1739 if (m->has_stackmap_table()) { 1740 int code_attribute_length = m->stackmap_data()->length(); 1741 Array<u1>* stackmap_data = 1742 MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_(methodHandle())); 1743 memcpy((void*)stackmap_data->adr_at(0), 1744 (void*)m->stackmap_data()->adr_at(0), code_attribute_length); 1745 newm->set_stackmap_data(stackmap_data); 1746 } 1747 1748 // copy annotations over to new method 1749 newcm->copy_annotations_from(loader_data, cm, CHECK_(methodHandle())); 1750 return newm; 1751 } 1752 1753 vmSymbolID Method::klass_id_for_intrinsics(const Klass* holder) { 1754 // if loader is not the default loader (i.e., non-null), we can't know the intrinsics 1755 // because we are not loading from core libraries 1756 // exception: the AES intrinsics come from lib/ext/sunjce_provider.jar 1757 // which does not use the class default class loader so we check for its loader here 1758 const InstanceKlass* ik = InstanceKlass::cast(holder); 1759 if ((ik->class_loader() != nullptr) && !SystemDictionary::is_platform_class_loader(ik->class_loader())) { 1760 return vmSymbolID::NO_SID; // regardless of name, no intrinsics here 1761 } 1762 1763 // see if the klass name is well-known: 1764 Symbol* klass_name = ik->name(); 1765 vmSymbolID id = vmSymbols::find_sid(klass_name); 1766 if (id != vmSymbolID::NO_SID && vmIntrinsics::class_has_intrinsics(id)) { 1767 return id; 1768 } else { 1769 return vmSymbolID::NO_SID; 1770 } 1771 } 1772 1773 void Method::init_intrinsic_id(vmSymbolID klass_id) { 1774 assert(_intrinsic_id == static_cast<int>(vmIntrinsics::_none), "do this just once"); 1775 const uintptr_t max_id_uint 
= right_n_bits((int)(sizeof(_intrinsic_id) * BitsPerByte)); 1776 assert((uintptr_t)vmIntrinsics::ID_LIMIT <= max_id_uint, "else fix size"); 1777 assert(intrinsic_id_size_in_bytes() == sizeof(_intrinsic_id), ""); 1778 1779 // the klass name is well-known: 1780 assert(klass_id == klass_id_for_intrinsics(method_holder()), "must be"); 1781 assert(klass_id != vmSymbolID::NO_SID, "caller responsibility"); 1782 1783 // ditto for method and signature: 1784 vmSymbolID name_id = vmSymbols::find_sid(name()); 1785 if (klass_id != VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle) 1786 && klass_id != VM_SYMBOL_ENUM_NAME(java_lang_invoke_VarHandle) 1787 && name_id == vmSymbolID::NO_SID) { 1788 return; 1789 } 1790 vmSymbolID sig_id = vmSymbols::find_sid(signature()); 1791 if (klass_id != VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle) 1792 && klass_id != VM_SYMBOL_ENUM_NAME(java_lang_invoke_VarHandle) 1793 && sig_id == vmSymbolID::NO_SID) { 1794 return; 1795 } 1796 1797 u2 flags = access_flags().as_method_flags(); 1798 vmIntrinsics::ID id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags); 1799 if (id != vmIntrinsics::_none) { 1800 set_intrinsic_id(id); 1801 if (id == vmIntrinsics::_Class_cast) { 1802 // Even if the intrinsic is rejected, we want to inline this simple method. 1803 set_force_inline(); 1804 } 1805 return; 1806 } 1807 1808 // A few slightly irregular cases: 1809 switch (klass_id) { 1810 // Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*., VarHandle 1811 case VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle): 1812 case VM_SYMBOL_ENUM_NAME(java_lang_invoke_VarHandle): 1813 if (!is_native()) break; 1814 id = MethodHandles::signature_polymorphic_name_id(method_holder(), name()); 1815 if (is_static() != MethodHandles::is_signature_polymorphic_static(id)) 1816 id = vmIntrinsics::_none; 1817 break; 1818 1819 default: 1820 break; 1821 } 1822 1823 if (id != vmIntrinsics::_none) { 1824 // Set up its iid. It is an alias method. 1825 set_intrinsic_id(id); 1826 return; 1827 } 1828 } 1829 1830 bool Method::load_signature_classes(const methodHandle& m, TRAPS) { 1831 if (!THREAD->can_call_java()) { 1832 // There is nothing useful this routine can do from within the Compile thread. 1833 // Hopefully, the signature contains only well-known classes. 1834 // We could scan for this and return true/false, but the caller won't care. 1835 return false; 1836 } 1837 bool sig_is_loaded = true; 1838 ResourceMark rm(THREAD); 1839 for (ResolvingSignatureStream ss(m()); !ss.is_done(); ss.next()) { 1840 if (ss.is_reference()) { 1841 // load everything, including arrays "[Lfoo;" 1842 Klass* klass = ss.as_klass(SignatureStream::ReturnNull, THREAD); 1843 // We are loading classes eagerly. If a ClassNotFoundException or 1844 // a LinkageError was generated, be sure to ignore it. 
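      // Only ClassNotFoundException and LinkageError are swallowed; any other pending
      // exception makes us return false immediately (the exception stays pending for
      // the caller). A class that simply fails to resolve just marks the signature
      // as not fully loaded.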
1845       if (HAS_PENDING_EXCEPTION) {
1846         if (PENDING_EXCEPTION->is_a(vmClasses::ClassNotFoundException_klass()) ||
1847             PENDING_EXCEPTION->is_a(vmClasses::LinkageError_klass())) {
1848           CLEAR_PENDING_EXCEPTION;
1849         } else {
1850           return false;
1851         }
1852       }
1853       if (klass == nullptr) { sig_is_loaded = false; }
1854     }
1855   }
1856   return sig_is_loaded;
1857 }
1858
1859 // Exposed so field engineers can debug the VM.
1860 void Method::print_short_name(outputStream* st) const {
1861   ResourceMark rm;
1862 #ifdef PRODUCT
1863   st->print(" %s::", method_holder()->external_name());
1864 #else
1865   st->print(" %s::", method_holder()->internal_name());
1866 #endif
1867   name()->print_symbol_on(st);
1868   if (WizardMode) signature()->print_symbol_on(st);
1869   else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))
1870     MethodHandles::print_as_basic_type_signature_on(st, signature());
1871 }
1872
1873 // Comparator for sorting an object array containing
1874 // Method*s.
1875 static int method_comparator(Method* a, Method* b) {
1876   return a->name()->fast_compare(b->name());
1877 }
1878
1879 // This is only done during class loading, so it is OK to assume method_idnum matches the methods() array.
1880 // default_methods also uses this, without the ordering, for fast find_method.
1881 void Method::sort_methods(Array<Method*>* methods, bool set_idnums, method_comparator_func func) {
1882   int length = methods->length();
1883   if (length > 1) {
1884     if (func == nullptr) {
1885       func = method_comparator;
1886     }
1887     {
1888       NoSafepointVerifier nsv;
1889       QuickSort::sort(methods->data(), length, func);
1890     }
1891     // Reset method ordering
1892     if (set_idnums) {
1893       for (u2 i = 0; i < length; i++) {
1894         Method* m = methods->at(i);
1895         m->set_method_idnum(i);
1896         m->set_orig_method_idnum(i);
1897       }
1898     }
1899   }
1900 }
1901
1902 //-----------------------------------------------------------------------------------
1903 // Non-product code unless JVM/TI needs it
1904
1905 #if !defined(PRODUCT) || INCLUDE_JVMTI
1906 class SignatureTypePrinter : public SignatureTypeNames {
1907  private:
1908   outputStream* _st;
1909   bool _use_separator;
1910
1911   void type_name(const char* name) {
1912     if (_use_separator) _st->print(", ");
1913     _st->print("%s", name);
1914     _use_separator = true;
1915   }
1916
1917  public:
1918   SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
1919     _st = st;
1920     _use_separator = false;
1921   }
1922
1923   void print_parameters() { _use_separator = false; do_parameters_on(this); }
1924   void print_returntype() { _use_separator = false; do_type(return_type()); }
1925 };
1926
1927
1928 void Method::print_name(outputStream* st) const {
1929   Thread *thread = Thread::current();
1930   ResourceMark rm(thread);
1931   st->print("%s ", is_static() ?
"static" : "virtual"); 1932 if (WizardMode) { 1933 st->print("%s.", method_holder()->internal_name()); 1934 name()->print_symbol_on(st); 1935 signature()->print_symbol_on(st); 1936 } else { 1937 SignatureTypePrinter sig(signature(), st); 1938 sig.print_returntype(); 1939 st->print(" %s.", method_holder()->internal_name()); 1940 name()->print_symbol_on(st); 1941 st->print("("); 1942 sig.print_parameters(); 1943 st->print(")"); 1944 } 1945 } 1946 #endif // !PRODUCT || INCLUDE_JVMTI 1947 1948 1949 void Method::print_codes_on(outputStream* st, int flags) const { 1950 print_codes_on(0, code_size(), st, flags); 1951 } 1952 1953 void Method::print_codes_on(int from, int to, outputStream* st, int flags) const { 1954 Thread *thread = Thread::current(); 1955 ResourceMark rm(thread); 1956 methodHandle mh (thread, (Method*)this); 1957 BytecodeTracer::print_method_codes(mh, from, to, st, flags); 1958 } 1959 1960 CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) { 1961 _bci = 0; 1962 _line = 0; 1963 }; 1964 1965 bool CompressedLineNumberReadStream::read_pair() { 1966 jubyte next = read_byte(); 1967 // Check for terminator 1968 if (next == 0) return false; 1969 if (next == 0xFF) { 1970 // Escape character, regular compression used 1971 _bci += read_signed_int(); 1972 _line += read_signed_int(); 1973 } else { 1974 // Single byte compression used 1975 _bci += next >> 3; 1976 _line += next & 0x7; 1977 } 1978 return true; 1979 } 1980 1981 #if INCLUDE_JVMTI 1982 1983 Bytecodes::Code Method::orig_bytecode_at(int bci) const { 1984 BreakpointInfo* bp = method_holder()->breakpoints(); 1985 for (; bp != nullptr; bp = bp->next()) { 1986 if (bp->match(this, bci)) { 1987 return bp->orig_bytecode(); 1988 } 1989 } 1990 { 1991 ResourceMark rm; 1992 fatal("no original bytecode found in %s at bci %d", name_and_sig_as_C_string(), bci); 1993 } 1994 return Bytecodes::_shouldnotreachhere; 1995 } 1996 1997 void Method::set_orig_bytecode_at(int bci, Bytecodes::Code code) { 1998 assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way"); 1999 BreakpointInfo* bp = method_holder()->breakpoints(); 2000 for (; bp != nullptr; bp = bp->next()) { 2001 if (bp->match(this, bci)) { 2002 bp->set_orig_bytecode(code); 2003 // and continue, in case there is more than one 2004 } 2005 } 2006 } 2007 2008 void Method::set_breakpoint(int bci) { 2009 InstanceKlass* ik = method_holder(); 2010 BreakpointInfo *bp = new BreakpointInfo(this, bci); 2011 bp->set_next(ik->breakpoints()); 2012 ik->set_breakpoints(bp); 2013 // do this last: 2014 bp->set(this); 2015 } 2016 2017 static void clear_matches(Method* m, int bci) { 2018 InstanceKlass* ik = m->method_holder(); 2019 BreakpointInfo* prev_bp = nullptr; 2020 BreakpointInfo* next_bp; 2021 for (BreakpointInfo* bp = ik->breakpoints(); bp != nullptr; bp = next_bp) { 2022 next_bp = bp->next(); 2023 // bci value of -1 is used to delete all breakpoints in method m (ex: clear_all_breakpoint). 2024 if (bci >= 0 ? bp->match(m, bci) : bp->match(m)) { 2025 // do this first: 2026 bp->clear(m); 2027 // unhook it 2028 if (prev_bp != nullptr) 2029 prev_bp->set_next(next_bp); 2030 else 2031 ik->set_breakpoints(next_bp); 2032 delete bp; 2033 // When class is redefined JVMTI sets breakpoint in all versions of EMCP methods 2034 // at same location. So we have multiple matching (method_index and bci) 2035 // BreakpointInfo nodes in BreakpointInfo list. 
We should delete just one
2036     // breakpoint for a clear_breakpoint request and keep the BreakpointInfo of all
2037     // other method versions for future clear_breakpoint requests.
2038     // A bci value of -1 is used to clear all breakpoints (see clear_all_breakpoints),
2039     // which is called when the class is unloaded. In that case we delete the breakpoint
2040     // information for all versions of the method. We may not correctly restore the original
2041     // bytecode in all method versions, but that is OK because the class is being unloaded,
2042     // so these methods won't be used anymore.
2043       if (bci >= 0) {
2044         break;
2045       }
2046     } else {
2047       // This one is a keeper.
2048       prev_bp = bp;
2049     }
2050   }
2051 }
2052
2053 void Method::clear_breakpoint(int bci) {
2054   assert(bci >= 0, "");
2055   clear_matches(this, bci);
2056 }
2057
2058 void Method::clear_all_breakpoints() {
2059   clear_matches(this, -1);
2060 }
2061
2062 #endif // INCLUDE_JVMTI
2063
2064 int Method::highest_osr_comp_level() const {
2065   const MethodCounters* mcs = method_counters();
2066   if (mcs != nullptr) {
2067     return mcs->highest_osr_comp_level();
2068   } else {
2069     return CompLevel_none;
2070   }
2071 }
2072
2073 void Method::set_highest_comp_level(int level) {
2074   MethodCounters* mcs = method_counters();
2075   if (mcs != nullptr) {
2076     mcs->set_highest_comp_level(level);
2077   }
2078 }
2079
2080 void Method::set_highest_osr_comp_level(int level) {
2081   MethodCounters* mcs = method_counters();
2082   if (mcs != nullptr) {
2083     mcs->set_highest_osr_comp_level(level);
2084   }
2085 }
2086
2087 #if INCLUDE_JVMTI
2088
2089 BreakpointInfo::BreakpointInfo(Method* m, int bci) {
2090   _bci = bci;
2091   _name_index = m->name_index();
2092   _signature_index = m->signature_index();
2093   _orig_bytecode = (Bytecodes::Code) *m->bcp_from(_bci);
2094   if (_orig_bytecode == Bytecodes::_breakpoint)
2095     _orig_bytecode = m->orig_bytecode_at(_bci);
2096   _next = nullptr;
2097 }
2098
2099 void BreakpointInfo::set(Method* method) {
2100 #ifdef ASSERT
2101   {
2102     Bytecodes::Code code = (Bytecodes::Code) *method->bcp_from(_bci);
2103     if (code == Bytecodes::_breakpoint)
2104       code = method->orig_bytecode_at(_bci);
2105     assert(orig_bytecode() == code, "original bytecode must be the same");
2106   }
2107 #endif
2108   Thread *thread = Thread::current();
2109   *method->bcp_from(_bci) = Bytecodes::_breakpoint;
2110   method->incr_number_of_breakpoints(thread);
2111   {
2112     // Deoptimize all dependents on this method
2113     HandleMark hm(thread);
2114     methodHandle mh(thread, method);
2115     CodeCache::mark_dependents_on_method_for_breakpoint(mh);
2116   }
2117 }
2118
2119 void BreakpointInfo::clear(Method* method) {
2120   *method->bcp_from(_bci) = orig_bytecode();
2121   assert(method->number_of_breakpoints() > 0, "must not go negative");
2122   method->decr_number_of_breakpoints(Thread::current());
2123 }
2124
2125 #endif // INCLUDE_JVMTI
2126
2127 // jmethodID handling
2128 // jmethodIDs are 64-bit integers that will never run out and are mapped in a table
2129 // to their Method and vice versa. If JNI code has access to a stale jmethodID, this
2130 // wastes no memory but the Method* returned is null.
2131
2132 // Add a method id to the jmethod_ids.
2133 jmethodID Method::make_jmethod_id(ClassLoaderData* cld, Method* m) {
2134   // Have to add jmethod_ids() to class loader data thread-safely.
2135   // Also have to add the method to the InstanceKlass list safely, which the lock
2136   // protects as well.
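  // Callers must therefore already hold JmethodIdCreation_lock (asserted below); the
  // new id is created in the global JmethodIDTable and also recorded in the holder's
  // ClassLoaderData so it can be found and cleaned up when its class loader is unloaded.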
2137 assert(JmethodIdCreation_lock->owned_by_self(), "sanity check"); 2138 jmethodID jmid = JmethodIDTable::make_jmethod_id(m); 2139 assert(jmid != nullptr, "must be created"); 2140 2141 // Add to growable array in CLD. 2142 cld->add_jmethod_id(jmid); 2143 return jmid; 2144 } 2145 2146 // This looks in the InstanceKlass cache, then calls back to make_jmethod_id if not found. 2147 jmethodID Method::jmethod_id() { 2148 return method_holder()->get_jmethod_id(this); 2149 } 2150 2151 // Get the Method out of the table given the method id. 2152 Method* Method::resolve_jmethod_id(jmethodID mid) { 2153 assert(mid != nullptr, "JNI method id should not be null"); 2154 return JmethodIDTable::resolve_jmethod_id(mid); 2155 } 2156 2157 void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) { 2158 // Can't assert the method_holder is the same because the new method has the 2159 // scratch method holder. 2160 assert(resolve_jmethod_id(jmid)->method_holder()->class_loader() 2161 == new_method->method_holder()->class_loader() || 2162 new_method->method_holder()->class_loader() == nullptr, // allow substitution to Unsafe method 2163 "changing to a different class loader"); 2164 JmethodIDTable::change_method_associated_with_jmethod_id(jmid, new_method); 2165 } 2166 2167 // If there's a jmethodID for this method, clear the Method 2168 // but leave jmethodID for this method in the table. 2169 // It's deallocated with class unloading. 2170 void Method::clear_jmethod_id() { 2171 jmethodID mid = method_holder()->jmethod_id_or_null(this); 2172 if (mid != nullptr) { 2173 JmethodIDTable::clear_jmethod_id(mid, this); 2174 } 2175 } 2176 2177 bool Method::validate_jmethod_id(jmethodID mid) { 2178 Method* m = resolve_jmethod_id(mid); 2179 assert(m != nullptr, "should be called with non-null method"); 2180 InstanceKlass* ik = m->method_holder(); 2181 ClassLoaderData* cld = ik->class_loader_data(); 2182 if (cld->jmethod_ids() == nullptr) return false; 2183 return (cld->jmethod_ids()->contains(mid)); 2184 } 2185 2186 Method* Method::checked_resolve_jmethod_id(jmethodID mid) { 2187 if (mid == nullptr) return nullptr; 2188 Method* o = resolve_jmethod_id(mid); 2189 if (o == nullptr) { 2190 return nullptr; 2191 } 2192 // Method should otherwise be valid. Assert for testing. 2193 assert(is_valid_method(o), "should be valid jmethodid"); 2194 // If the method's class holder object is unreferenced, but not yet marked as 2195 // unloaded, we need to return null here too because after a safepoint, its memory 2196 // will be reclaimed. 2197 return o->method_holder()->is_loader_alive() ? o : nullptr; 2198 } 2199 2200 void Method::set_on_stack(const bool value) { 2201 // Set both the method itself and its constant pool. The constant pool 2202 // on stack means some method referring to it is also on the stack. 2203 constants()->set_on_stack(value); 2204 2205 bool already_set = on_stack_flag(); 2206 set_on_stack_flag(value); 2207 if (value && !already_set) { 2208 MetadataOnStackMark::record(this); 2209 } 2210 } 2211 2212 void Method::record_gc_epoch() { 2213 // If any method is on the stack in continuations, none of them can be reclaimed, 2214 // so save the marking cycle to check for the whole class in the cpCache. 2215 // The cpCache is writeable. 2216 constants()->cache()->record_gc_epoch(); 2217 } 2218 2219 bool Method::has_method_vptr(const void* ptr) { 2220 Method m; 2221 // This assumes that the vtbl pointer is the first word of a C++ object. 
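  // A throwaway Method is default-constructed on the stack above purely so that its
  // vptr can be compared, word for word, against the first word behind the candidate pointer.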
2222 return dereference_vptr(&m) == dereference_vptr(ptr); 2223 } 2224 2225 // Check that this pointer is valid by checking that the vtbl pointer matches 2226 bool Method::is_valid_method(const Method* m) { 2227 if (m == nullptr) { 2228 return false; 2229 } else if ((intptr_t(m) & (wordSize-1)) != 0) { 2230 // Quick sanity check on pointer. 2231 return false; 2232 } else if (!os::is_readable_range(m, m + 1)) { 2233 return false; 2234 } else if (m->is_shared()) { 2235 return CppVtables::is_valid_shared_method(m); 2236 } else if (Metaspace::contains_non_shared(m)) { 2237 return has_method_vptr((const void*)m); 2238 } else { 2239 return false; 2240 } 2241 } 2242 2243 bool Method::is_scalarized_arg(int idx) const { 2244 if (!has_scalarized_args()) { 2245 return false; 2246 } 2247 // Search through signature and check if argument is wrapped in T_METADATA/T_VOID 2248 int depth = 0; 2249 const GrowableArray<SigEntry>* sig = adapter()->get_sig_cc(); 2250 for (int i = 0; i < sig->length(); i++) { 2251 BasicType bt = sig->at(i)._bt; 2252 if (bt == T_METADATA) { 2253 depth++; 2254 } 2255 if (idx == 0) { 2256 break; // Argument found 2257 } 2258 if (bt == T_VOID && (sig->at(i-1)._bt != T_LONG && sig->at(i-1)._bt != T_DOUBLE)) { 2259 depth--; 2260 } 2261 if (depth == 0 && bt != T_LONG && bt != T_DOUBLE) { 2262 idx--; // Advance to next argument 2263 } 2264 } 2265 return depth != 0; 2266 } 2267 2268 // Printing 2269 2270 #ifndef PRODUCT 2271 2272 void Method::print_on(outputStream* st) const { 2273 ResourceMark rm; 2274 assert(is_method(), "must be method"); 2275 st->print_cr("%s", internal_name()); 2276 st->print_cr(" - this oop: " PTR_FORMAT, p2i(this)); 2277 st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr(); 2278 st->print (" - constants: " PTR_FORMAT " ", p2i(constants())); 2279 constants()->print_value_on(st); st->cr(); 2280 st->print (" - access: 0x%x ", access_flags().as_method_flags()); access_flags().print_on(st); st->cr(); 2281 st->print (" - flags: 0x%x ", _flags.as_int()); _flags.print_on(st); st->cr(); 2282 st->print (" - name: "); name()->print_value_on(st); st->cr(); 2283 st->print (" - signature: "); signature()->print_value_on(st); st->cr(); 2284 st->print_cr(" - max stack: %d", max_stack()); 2285 st->print_cr(" - max locals: %d", max_locals()); 2286 st->print_cr(" - size of params: %d", size_of_parameters()); 2287 st->print_cr(" - method size: %d", method_size()); 2288 if (intrinsic_id() != vmIntrinsics::_none) 2289 st->print_cr(" - intrinsic id: %d %s", vmIntrinsics::as_int(intrinsic_id()), vmIntrinsics::name_at(intrinsic_id())); 2290 if (highest_comp_level() != CompLevel_none) 2291 st->print_cr(" - highest level: %d", highest_comp_level()); 2292 st->print_cr(" - vtable index: %d", _vtable_index); 2293 #ifdef ASSERT 2294 if (valid_itable_index()) 2295 st->print_cr(" - itable index: %d", itable_index()); 2296 #endif 2297 st->print_cr(" - i2i entry: " PTR_FORMAT, p2i(interpreter_entry())); 2298 st->print( " - adapters: "); 2299 AdapterHandlerEntry* a = ((Method*)this)->adapter(); 2300 if (a == nullptr) 2301 st->print_cr(PTR_FORMAT, p2i(a)); 2302 else 2303 a->print_adapter_on(st); 2304 st->print_cr(" - compiled entry " PTR_FORMAT, p2i(from_compiled_entry())); 2305 st->print_cr(" - compiled inline entry " PTR_FORMAT, p2i(from_compiled_inline_entry())); 2306 st->print_cr(" - compiled inline ro entry " PTR_FORMAT, p2i(from_compiled_inline_ro_entry())); 2307 st->print_cr(" - code size: %d", code_size()); 2308 if (code_size() != 0) { 2309 st->print_cr(" - code 
start: " PTR_FORMAT, p2i(code_base())); 2310 st->print_cr(" - code end (excl): " PTR_FORMAT, p2i(code_base() + code_size())); 2311 } 2312 if (method_data() != nullptr) { 2313 st->print_cr(" - method data: " PTR_FORMAT, p2i(method_data())); 2314 } 2315 st->print_cr(" - checked ex length: %d", checked_exceptions_length()); 2316 if (checked_exceptions_length() > 0) { 2317 CheckedExceptionElement* table = checked_exceptions_start(); 2318 st->print_cr(" - checked ex start: " PTR_FORMAT, p2i(table)); 2319 if (Verbose) { 2320 for (int i = 0; i < checked_exceptions_length(); i++) { 2321 st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index)); 2322 } 2323 } 2324 } 2325 if (has_linenumber_table()) { 2326 u_char* table = compressed_linenumber_table(); 2327 st->print_cr(" - linenumber start: " PTR_FORMAT, p2i(table)); 2328 if (Verbose) { 2329 CompressedLineNumberReadStream stream(table); 2330 while (stream.read_pair()) { 2331 st->print_cr(" - line %d: %d", stream.line(), stream.bci()); 2332 } 2333 } 2334 } 2335 st->print_cr(" - localvar length: %d", localvariable_table_length()); 2336 if (localvariable_table_length() > 0) { 2337 LocalVariableTableElement* table = localvariable_table_start(); 2338 st->print_cr(" - localvar start: " PTR_FORMAT, p2i(table)); 2339 if (Verbose) { 2340 for (int i = 0; i < localvariable_table_length(); i++) { 2341 int bci = table[i].start_bci; 2342 int len = table[i].length; 2343 const char* name = constants()->printable_name_at(table[i].name_cp_index); 2344 const char* desc = constants()->printable_name_at(table[i].descriptor_cp_index); 2345 int slot = table[i].slot; 2346 st->print_cr(" - %s %s bci=%d len=%d slot=%d", desc, name, bci, len, slot); 2347 } 2348 } 2349 } 2350 if (code() != nullptr) { 2351 st->print (" - compiled code: "); 2352 code()->print_value_on(st); 2353 } 2354 if (is_native()) { 2355 st->print_cr(" - native function: " PTR_FORMAT, p2i(native_function())); 2356 st->print_cr(" - signature handler: " PTR_FORMAT, p2i(signature_handler())); 2357 } 2358 } 2359 2360 void Method::print_linkage_flags(outputStream* st) { 2361 access_flags().print_on(st); 2362 if (is_default_method()) { 2363 st->print("default "); 2364 } 2365 if (is_overpass()) { 2366 st->print("overpass "); 2367 } 2368 } 2369 #endif //PRODUCT 2370 2371 void Method::print_value_on(outputStream* st) const { 2372 assert(is_method(), "must be method"); 2373 st->print("%s", internal_name()); 2374 print_address_on(st); 2375 st->print(" "); 2376 if (WizardMode) access_flags().print_on(st); 2377 name()->print_value_on(st); 2378 st->print(" "); 2379 signature()->print_value_on(st); 2380 st->print(" in "); 2381 method_holder()->print_value_on(st); 2382 if (WizardMode) st->print("#%d", _vtable_index); 2383 if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals()); 2384 if (WizardMode && code() != nullptr) st->print(" ((nmethod*)%p)", code()); 2385 } 2386 2387 // Verification 2388 2389 void Method::verify_on(outputStream* st) { 2390 guarantee(is_method(), "object must be method"); 2391 guarantee(constants()->is_constantPool(), "should be constant pool"); 2392 MethodData* md = method_data(); 2393 guarantee(md == nullptr || 2394 md->is_methodData(), "should be method data"); 2395 }