1 /*
   2  * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "cds/cdsConfig.hpp"
  27 #include "cds/cppVtables.hpp"
  28 #include "cds/metaspaceShared.hpp"
  29 #include "classfile/classLoader.hpp"
  30 #include "classfile/classLoaderDataGraph.hpp"
  31 #include "classfile/metadataOnStackMark.hpp"
  32 #include "classfile/symbolTable.hpp"
  33 #include "classfile/systemDictionary.hpp"
  34 #include "classfile/vmClasses.hpp"
  35 #include "code/codeCache.hpp"
  36 #include "code/debugInfoRec.hpp"
  37 #include "compiler/compilationPolicy.hpp"
  38 #include "gc/shared/collectedHeap.inline.hpp"
  39 #include "interpreter/bytecodeStream.hpp"
  40 #include "interpreter/bytecodeTracer.hpp"
  41 #include "interpreter/bytecodes.hpp"
  42 #include "interpreter/interpreter.hpp"
  43 #include "interpreter/oopMapCache.hpp"
  44 #include "logging/log.hpp"
  45 #include "logging/logStream.hpp"
  46 #include "logging/logTag.hpp"
  47 #include "memory/allocation.inline.hpp"
  48 #include "memory/metadataFactory.hpp"
  49 #include "memory/metaspaceClosure.hpp"
  50 #include "memory/oopFactory.hpp"
  51 #include "memory/resourceArea.hpp"
  52 #include "memory/universe.hpp"
  53 #include "nmt/memTracker.hpp"
  54 #include "oops/constMethod.hpp"
  55 #include "oops/constantPool.hpp"
  56 #include "oops/klass.inline.hpp"
  57 #include "oops/method.inline.hpp"
  58 #include "oops/methodData.hpp"
  59 #include "oops/objArrayKlass.hpp"
  60 #include "oops/objArrayOop.inline.hpp"
  61 #include "oops/oop.inline.hpp"
  62 #include "oops/symbol.hpp"
  63 #include "oops/trainingData.hpp"
  64 #include "prims/jvmtiExport.hpp"
  65 #include "prims/methodHandles.hpp"
  66 #include "runtime/atomic.hpp"
  67 #include "runtime/continuationEntry.hpp"
  68 #include "runtime/frame.inline.hpp"
  69 #include "runtime/handles.inline.hpp"
  70 #include "runtime/init.hpp"
  71 #include "runtime/java.hpp"
  72 #include "runtime/orderAccess.hpp"
  73 #include "runtime/perfData.hpp"
  74 #include "runtime/relocator.hpp"
  75 #include "runtime/safepointVerifiers.hpp"
  76 #include "runtime/sharedRuntime.hpp"
  77 #include "runtime/signature.hpp"
  78 #include "runtime/threads.hpp"
  79 #include "runtime/vm_version.hpp"
  80 #include "utilities/align.hpp"
  81 #include "utilities/quickSort.hpp"
  82 #include "utilities/vmError.hpp"
  83 #include "utilities/xmlstream.hpp"
  84 
  85 // Implementation of Method
  86 
  87 Method* Method::allocate(ClassLoaderData* loader_data,
  88                          int byte_code_size,
  89                          AccessFlags access_flags,
  90                          InlineTableSizes* sizes,
  91                          ConstMethod::MethodType method_type,
  92                          Symbol* name,
  93                          TRAPS) {
  94   assert(!access_flags.is_native() || byte_code_size == 0,
  95          "native methods should not contain byte codes");
  96   ConstMethod* cm = ConstMethod::allocate(loader_data,
  97                                           byte_code_size,
  98                                           sizes,
  99                                           method_type,
 100                                           CHECK_NULL);
 101   int size = Method::size(access_flags.is_native());
 102   return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, name);
 103 }
 104 
 105 Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
 106   NoSafepointVerifier no_safepoint;
 107   set_constMethod(xconst);
 108   set_access_flags(access_flags);
 109   set_intrinsic_id(vmIntrinsics::_none);
 110   clear_method_data();
 111   clear_method_counters();
 112   set_vtable_index(Method::garbage_vtable_index);
 113 
 114   // Fix and bury in Method*
 115   set_interpreter_entry(nullptr); // sets i2i entry and from_int
 116   set_adapter_entry(nullptr);
 117   Method::clear_code(); // from_c/from_i get set to c2i/i2i
 118 
 119   if (access_flags.is_native()) {
 120     clear_native_function();
 121     set_signature_handler(nullptr);
 122   }
 123 
 124   NOT_PRODUCT(set_compiled_invocation_count(0);)
 125   // Name is very useful for debugging.
 126   NOT_PRODUCT(_name = name;)
 127 }
 128 
 129 // Release Method*.  The nmethod will be gone when we get here because
 130 // we've walked the code cache.
 131 void Method::deallocate_contents(ClassLoaderData* loader_data) {
 132   MetadataFactory::free_metadata(loader_data, constMethod());
 133   set_constMethod(nullptr);
 134   MetadataFactory::free_metadata(loader_data, method_data());
 135   clear_method_data();
 136   MetadataFactory::free_metadata(loader_data, method_counters());
 137   clear_method_counters();
 138   // The nmethod will be gone when we get here.
 139   if (code() != nullptr) _code = nullptr;
 140 }
 141 
 142 void Method::release_C_heap_structures() {
 143   if (method_data()) {
 144     method_data()->release_C_heap_structures();
 145 
 146     // Destroy MethodData embedded lock
 147     method_data()->~MethodData();
 148   }
 149 }
 150 
 151 address Method::get_i2c_entry() {
 152   assert(adapter() != nullptr, "must have");
 153   return adapter()->get_i2c_entry();
 154 }
 155 
 156 address Method::get_c2i_entry() {
 157   assert(adapter() != nullptr, "must have");
 158   return adapter()->get_c2i_entry();
 159 }
 160 
 161 address Method::get_c2i_unverified_entry() {
 162   assert(adapter() != nullptr, "must have");
 163   return adapter()->get_c2i_unverified_entry();
 164 }
 165 
 166 address Method::get_c2i_no_clinit_check_entry() {
 167   assert(VM_Version::supports_fast_class_init_checks(), "");
 168   assert(adapter() != nullptr, "must have");
 169   return adapter()->get_c2i_no_clinit_check_entry();
 170 }
 171 
 172 char* Method::name_and_sig_as_C_string() const {
 173   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
 174 }
 175 
 176 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
 177   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
 178 }
 179 
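// Produces, e.g., "java.lang.Object::hashCode()I" in a resource-allocated buffer.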
 180 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
 181   const char* klass_name = klass->external_name();
 182   int klass_name_len  = (int)strlen(klass_name);
 183   int method_name_len = method_name->utf8_length();
 184   int len             = klass_name_len + 2 + method_name_len + signature->utf8_length();
 185   char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
 186   strcpy(dest, klass_name);
 187   dest[klass_name_len + 0] = ':';
 188   dest[klass_name_len + 1] = ':';
 189   strcpy(&dest[klass_name_len + 2], method_name->as_C_string());
 190   strcpy(&dest[klass_name_len + 2 + method_name_len], signature->as_C_string());
 191   dest[len] = 0;
 192   return dest;
 193 }
 194 
 195 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
 196   Symbol* klass_name = klass->name();
 197   klass_name->as_klass_external_name(buf, size);
 198   int len = (int)strlen(buf);
 199 
 200   if (len < size - 1) {
 201     buf[len++] = '.';
 202 
 203     method_name->as_C_string(&(buf[len]), size - len);
 204     len = (int)strlen(buf);
 205 
 206     signature->as_C_string(&(buf[len]), size - len);
 207   }
 208 
 209   return buf;
 210 }
 211 
 212 const char* Method::external_name() const {
 213   return external_name(constants()->pool_holder(), name(), signature());
 214 }
 215 
 216 void Method::print_external_name(outputStream *os) const {
 217   print_external_name(os, constants()->pool_holder(), name(), signature());
 218 }
 219 
 220 const char* Method::external_name(Klass* klass, Symbol* method_name, Symbol* signature) {
 221   stringStream ss;
 222   print_external_name(&ss, klass, method_name, signature);
 223   return ss.as_string();
 224 }
 225 
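// Prints the method in Java source form, e.g. "void java.lang.String.getChars(int, int, char[], int)".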
 226 void Method::print_external_name(outputStream *os, Klass* klass, Symbol* method_name, Symbol* signature) {
 227   signature->print_as_signature_external_return_type(os);
 228   os->print(" %s.%s(", klass->external_name(), method_name->as_C_string());
 229   signature->print_as_signature_external_parameters(os);
 230   os->print(")");
 231 }
 232 
 233 int Method::fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS) {
 234   if (log_is_enabled(Debug, exceptions)) {
 235     ResourceMark rm(THREAD);
 236     log_debug(exceptions)("Looking for catch handler for exception of type \"%s\" in method \"%s\"",
 237                           ex_klass == nullptr ? "null" : ex_klass->external_name(), mh->name()->as_C_string());
 238   }
 239   // exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index)
 240   // access exception table
 241   ExceptionTable table(mh());
 242   int length = table.length();
 243   // iterate through all entries sequentially
 244   constantPoolHandle pool(THREAD, mh->constants());
 245   for (int i = 0; i < length; i ++) {
 246     // Reacquire the table in case a GC happened
 247     ExceptionTable table(mh());
 248     int beg_bci = table.start_pc(i);
 249     int end_bci = table.end_pc(i);
 250     assert(beg_bci <= end_bci, "inconsistent exception table");
 251     log_debug(exceptions)("  - checking exception table entry for BCI %d to %d",
 252                          beg_bci, end_bci);
 253 
 254     if (beg_bci <= throw_bci && throw_bci < end_bci) {
 255       // exception handler bci range covers throw_bci => investigate further
 256       log_debug(exceptions)("    - entry covers throw point BCI %d", throw_bci);
 257 
 258       int handler_bci = table.handler_pc(i);
 259       int klass_index = table.catch_type_index(i);
 260       if (klass_index == 0) {
 261         if (log_is_enabled(Info, exceptions)) {
 262           ResourceMark rm(THREAD);
 263           log_info(exceptions)("Found catch-all handler for exception of type \"%s\" in method \"%s\" at BCI: %d",
 264                                ex_klass == nullptr ? "null" : ex_klass->external_name(), mh->name()->as_C_string(), handler_bci);
 265         }
 266         return handler_bci;
 267       } else if (ex_klass == nullptr) {
 268         // Is this even possible?
 269         if (log_is_enabled(Info, exceptions)) {
 270           ResourceMark rm(THREAD);
 271           log_info(exceptions)("null exception class is implicitly caught by handler in method \"%s\" at BCI: %d",
 272                                mh()->name()->as_C_string(), handler_bci);
 273         }
 274         return handler_bci;
 275       } else {
 276         if (log_is_enabled(Debug, exceptions)) {
 277           ResourceMark rm(THREAD);
 278           log_debug(exceptions)("    - resolving catch type \"%s\"",
 279                                pool->klass_name_at(klass_index)->as_C_string());
 280         }
 281         // we know the exception class => get the constraint class
 282         // this may require loading of the constraint class; if verification
 283         // fails or some other exception occurs, return handler_bci
 284         Klass* k = pool->klass_at(klass_index, THREAD);
 285         if (HAS_PENDING_EXCEPTION) {
 286           if (log_is_enabled(Debug, exceptions)) {
 287             ResourceMark rm(THREAD);
 288             log_debug(exceptions)("    - exception \"%s\" occurred resolving catch type",
 289                                  PENDING_EXCEPTION->klass()->external_name());
 290           }
 291           return handler_bci;
 292         }
 293         assert(k != nullptr, "klass not loaded");
 294         if (ex_klass->is_subtype_of(k)) {
 295           if (log_is_enabled(Info, exceptions)) {
 296             ResourceMark rm(THREAD);
 297             log_info(exceptions)("Found matching handler for exception of type \"%s\" in method \"%s\" at BCI: %d",
 298                                  ex_klass == nullptr ? "null" : ex_klass->external_name(), mh->name()->as_C_string(), handler_bci);
 299           }
 300           return handler_bci;
 301         }
 302       }
 303     }
 304   }
 305 
 306   if (log_is_enabled(Debug, exceptions)) {
 307     ResourceMark rm(THREAD);
 308     log_debug(exceptions)("No catch handler found for exception of type \"%s\" in method \"%s\"",
 309                           ex_klass->external_name(), mh->name()->as_C_string());
 310   }
 311 
 312   return -1;
 313 }
 314 
 315 void Method::mask_for(int bci, InterpreterOopMap* mask) {
 316   methodHandle h_this(Thread::current(), this);
 317   // Only GC uses the OopMapCache during thread stack root scanning
 318   // any other uses generate an oopmap but do not save it in the cache.
 319   if (Universe::heap()->is_gc_active()) {
 320     method_holder()->mask_for(h_this, bci, mask);
 321   } else {
 322     OopMapCache::compute_one_oop_map(h_this, bci, mask);
 323   }
 324   return;
 325 }
 326 
 327 
 328 int Method::bci_from(address bcp) const {
 329   if (is_native() && bcp == 0) {
 330     return 0;
 331   }
 332   // Do not have a ResourceMark here because AsyncGetCallTrace stack walking code
 333   // may call this after interrupting a nested ResourceMark.
 334   assert((is_native() && bcp == code_base()) || contains(bcp) || VMError::is_error_reported(),
 335          "bcp doesn't belong to this method. bcp: " PTR_FORMAT, p2i(bcp));
 336 
 337   return int(bcp - code_base());
 338 }
 339 
 340 
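// Note: a bci of 0 is accepted even when code_size() == 0, matching what
// bci_from()/bcp_from() report for native methods.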
 341 int Method::validate_bci(int bci) const {
 342   return (bci == 0 || bci < code_size()) ? bci : -1;
 343 }
 344 
 345 // Return bci if it appears to be a valid bcp
 346 // Return -1 otherwise.
 347 // Used by profiling code, when invalid data is a possibility.
 348 // The caller is responsible for validating the Method* itself.
 349 int Method::validate_bci_from_bcp(address bcp) const {
 350   // keep bci as -1 if not a valid bci
 351   int bci = -1;
 352   if (bcp == 0 || bcp == code_base()) {
 353     // code_size() may return 0 and we allow 0 here
 354     // the method may be native
 355     bci = 0;
 356   } else if (contains(bcp)) {
 357     bci = int(bcp - code_base());
 358   }
 359   // Assert that if we have dodged any asserts, bci is negative.
 360   assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0");
 361   return bci;
 362 }
 363 
 364 address Method::bcp_from(int bci) const {
 365   assert((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size()),
 366          "illegal bci: %d for %s method", bci, is_native() ? "native" : "non-native");
 367   address bcp = code_base() + bci;
 368   assert((is_native() && bcp == code_base()) || contains(bcp), "bcp doesn't belong to this method");
 369   return bcp;
 370 }
 371 
 372 address Method::bcp_from(address bcp) const {
 373   if (is_native() && bcp == nullptr) {
 374     return code_base();
 375   } else {
 376     return bcp;
 377   }
 378 }
 379 
 380 int Method::size(bool is_native) {
 381   // If native, then include pointers for native_function and signature_handler
 382   int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
 383   int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
 384   return align_metadata_size(header_size() + extra_words);
 385 }
 386 
 387 Symbol* Method::klass_name() const {
 388   return method_holder()->name();
 389 }
 390 
 391 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
 392   LogStreamHandle(Trace, cds) lsh;
 393   if (lsh.is_enabled()) {
 394     lsh.print("Iter(Method): %p ", this);
 395     print_external_name(&lsh);
 396     lsh.cr();
 397   }
 398   // holder is null for MH intrinsic methods
 399   if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
 400     it->push(&_constMethod, MetaspaceClosure::_writable);
 401   } else {
 402     it->push(&_constMethod);
 403   }
 404   it->push(&_method_data);
 405   it->push(&_method_counters);
 406   NOT_PRODUCT(it->push(&_name);)
 407 }
 408 
 409 #if INCLUDE_CDS
 410 // Attempt to return method to original state.  Clear any pointers
 411 // (to objects outside the shared spaces).  We won't be able to predict
 412 // where they should point in a new JVM.  Further initialize some
 413 // entries now in order to allow them to be write protected later.
 414 
 415 void Method::remove_unshareable_info() {
 416   unlink_method();
 417   if (method_data() != nullptr) {
 418     method_data()->remove_unshareable_info();
 419   }
 420   if (method_counters() != nullptr) {
 421     method_counters()->remove_unshareable_info();
 422   }
 423   JFR_ONLY(REMOVE_METHOD_ID(this);)
 424 }
 425 
 426 void Method::restore_unshareable_info(TRAPS) {
 427   assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
 428   if (method_data() != nullptr) {
 429     method_data()->restore_unshareable_info(CHECK);
 430   }
 431   if (method_counters() != nullptr) {
 432     method_counters()->restore_unshareable_info(CHECK);
 433   }
 434   assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
 435   assert(!pending_queue_processed(), "method's pending_queue_processed flag should not be set");
 436 }
 437 #endif
 438 
 439 void Method::set_vtable_index(int index) {
 440   if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 441     // At runtime initialize_vtable is rerun as part of link_class_impl()
 442     // for a shared class loaded by the non-boot loader to obtain the loader
 443     // constraints based on the runtime classloaders' context.
 444     return; // don't write into the shared class
 445   } else {
 446     _vtable_index = index;
 447   }
 448 }
 449 
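// Itable indexes share the _vtable_index slot: an itable index i is stored as
// (itable_index_max - i), which keeps it outside the range of real vtable indexes.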
 450 void Method::set_itable_index(int index) {
 451   if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 452     // At runtime initialize_itable is rerun as part of link_class_impl()
 453     // for a shared class loaded by the non-boot loader to obtain the loader
 454     // constraints based on the runtime classloaders' context. The dumptime
 455     // itable index should be the same as the runtime index.
 456     assert(_vtable_index == itable_index_max - index,
 457            "archived itable index is different from runtime index");
 458     return; // don't write into the shared class
 459   } else {
 460     _vtable_index = itable_index_max - index;
 461   }
 462   assert(valid_itable_index(), "");
 463 }
 464 
 465 // The RegisterNatives call being attempted tried to register with a method that
 466 // is not native.  Ask JVM TI what prefixes have been specified.  Then check
 467 // to see if the native method is now wrapped with the prefixes.  See the
 468 // SetNativeMethodPrefix(es) functions in the JVM TI Spec for details.
 469 static Method* find_prefixed_native(Klass* k, Symbol* name, Symbol* signature, TRAPS) {
 470 #if INCLUDE_JVMTI
 471   ResourceMark rm(THREAD);
 472   Method* method;
 473   int name_len = name->utf8_length();
 474   char* name_str = name->as_utf8();
 475   int prefix_count;
 476   char** prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
 477   for (int i = 0; i < prefix_count; i++) {
 478     char* prefix = prefixes[i];
 479     int prefix_len = (int)strlen(prefix);
 480 
 481     // try adding this prefix to the method name and see if it matches another method name
 482     int trial_len = name_len + prefix_len;
 483     char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
 484     strcpy(trial_name_str, prefix);
 485     strcat(trial_name_str, name_str);
 486     TempNewSymbol trial_name = SymbolTable::probe(trial_name_str, trial_len);
 487     if (trial_name == nullptr) {
 488       continue; // no such symbol, so this prefix wasn't used, try the next prefix
 489     }
 490     method = k->lookup_method(trial_name, signature);
 491     if (method == nullptr) {
 492       continue; // signature doesn't match, try the next prefix
 493     }
 494     if (method->is_native()) {
 495       method->set_is_prefixed_native();
 496       return method; // wahoo, we found a prefixed version of the method, return it
 497     }
 498     // found as non-native, so prefix is good, add it, probably just need more prefixes
 499     name_len = trial_len;
 500     name_str = trial_name_str;
 501   }
 502 #endif // INCLUDE_JVMTI
 503   return nullptr; // not found
 504 }
 505 
 506 bool Method::register_native(Klass* k, Symbol* name, Symbol* signature, address entry, TRAPS) {
 507   Method* method = k->lookup_method(name, signature);
 508   if (method == nullptr) {
 509     ResourceMark rm(THREAD);
 510     stringStream st;
 511     st.print("Method '");
 512     print_external_name(&st, k, name, signature);
 513     st.print("' name or signature does not match");
 514     THROW_MSG_(vmSymbols::java_lang_NoSuchMethodError(), st.as_string(), false);
 515   }
 516   if (!method->is_native()) {
 517     // trying to register to a non-native method, see if a JVM TI agent has added prefix(es)
 518     method = find_prefixed_native(k, name, signature, THREAD);
 519     if (method == nullptr) {
 520       ResourceMark rm(THREAD);
 521       stringStream st;
 522       st.print("Method '");
 523       print_external_name(&st, k, name, signature);
 524       st.print("' is not declared as native");
 525       THROW_MSG_(vmSymbols::java_lang_NoSuchMethodError(), st.as_string(), false);
 526     }
 527   }
 528 
 529   if (entry != nullptr) {
 530     method->set_native_function(entry, native_bind_event_is_interesting);
 531   } else {
 532     method->clear_native_function();
 533   }
 534   if (log_is_enabled(Debug, jni, resolve)) {
 535     ResourceMark rm(THREAD);
 536     log_debug(jni, resolve)("[Registering JNI native method %s.%s]",
 537                             method->method_holder()->external_name(),
 538                             method->name()->as_C_string());
 539   }
 540   return true;
 541 }
 542 
 543 bool Method::was_executed_more_than(int n) {
 544   // Invocation counter is reset when the Method* is compiled.
 545   // If the method has compiled code we therefore assume it has
 546   // been executed more than n times.
 547   if (is_accessor() || is_empty_method() || (code() != nullptr)) {
 548     // interpreter doesn't bump invocation counter of trivial methods
 549     // compiler does not bump invocation counter of compiled methods
 550     return true;
 551   }
 552   else if ((method_counters() != nullptr &&
 553             method_counters()->invocation_counter()->carry()) ||
 554            (method_data() != nullptr &&
 555             method_data()->invocation_counter()->carry())) {
 556     // The carry bit is set when the counter overflows and causes
 557     // a compilation to occur.  We don't know how many times
 558     // the counter has been reset, so we simply assume it has
 559     // been executed more than n times.
 560     return true;
 561   } else {
 562     return invocation_count() > n;
 563   }
 564 }
 565 
 566 void Method::print_invocation_count(outputStream* st) {
 567   //---<  compose+print method return type, klass, name, and signature  >---
 568   if (is_static())       { st->print("static "); }
 569   if (is_final())        { st->print("final "); }
 570   if (is_synchronized()) { st->print("synchronized "); }
 571   if (is_native())       { st->print("native "); }
 572   st->print("%s::", method_holder()->external_name());
 573   name()->print_symbol_on(st);
 574   signature()->print_symbol_on(st);
 575 
 576   if (WizardMode) {
 577     // dump the size of the byte codes
 578     st->print(" {%d}", code_size());
 579   }
 580   st->cr();
 581 
 582   // Counting based on signed int counters tends to overflow with
 583   // longer-running workloads on fast machines. The counters under
 584   // consideration here, however, are limited in range by counting
 585   // logic. See InvocationCounter:count_limit for example.
 586   // No "overflow precautions" need to be implemented here.
 587   st->print_cr ("  interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count());
 588   st->print_cr ("  invocation_counter:           " INT32_FORMAT_W(11), invocation_count());
 589   st->print_cr ("  backedge_counter:             " INT32_FORMAT_W(11), backedge_count());
 590 
 591   if (method_data() != nullptr) {
 592     st->print_cr ("  decompile_count:              " UINT32_FORMAT_W(11), method_data()->decompile_count());
 593   }
 594 
 595 #ifndef PRODUCT
 596   if (CountCompiledCalls) {
 597     st->print_cr ("  compiled_invocation_count:    " INT64_FORMAT_W(11), compiled_invocation_count());
 598   }
 599 #endif
 600 }
 601 
 602 MethodTrainingData* Method::training_data_or_null() const {
 603   MethodCounters* mcs = method_counters();
 604   if (mcs == nullptr) {
 605     return nullptr;
 606   } else {
 607     return mcs->method_training_data();
 608   }
 609 }
 610 
 611 bool Method::init_training_data(MethodTrainingData* tdata) {
 612   MethodCounters* mcs = method_counters();
 613   if (mcs == nullptr) {
 614     return false;
 615   } else {
 616     return mcs->init_method_training_data(tdata);
 617   }
 618 }
 619 
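// Install a MethodData precomputed by training data (if one exists and matches this
// method) instead of allocating a fresh MDO.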
 620 bool Method::install_training_method_data(const methodHandle& method) {
 621   MethodTrainingData* mtd = MethodTrainingData::find(method);
 622   if (mtd != nullptr && mtd->has_holder() && mtd->final_profile() != nullptr &&
 623       mtd->holder() == method() && mtd->final_profile()->method() == method()) { // FIXME
 624     Atomic::replace_if_null(&method->_method_data, mtd->final_profile());
 625     return true;
 626   }
 627   return false;
 628 }
 629 
 630 // Build a MethodData* object to hold profiling information collected on this
 631 // method when requested.
 632 void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
 633   if (install_training_method_data(method)) {
 634     return;
 635   }
 636   // Do not profile the method if metaspace has hit an OOM previously
 637   // allocating profiling data. Callers clear pending exception so don't
 638   // add one here.
 639   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 640     return;
 641   }
 642 
 643   ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
 644   MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
 645   if (HAS_PENDING_EXCEPTION) {
 646     CompileBroker::log_metaspace_failure();
 647     ClassLoaderDataGraph::set_metaspace_oom(true);
 648     return;   // return the exception (which is cleared)
 649   }
 650 
 651   if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
 652     MetadataFactory::free_metadata(loader_data, method_data);
 653     return;
 654   }
 655 
 656   /*
 657   LogStreamHandle(Info, mdo) lsh;
 658   if (lsh.is_enabled()) {
 659     ResourceMark rm(THREAD);
 660     lsh.print("build_profiling_method_data for ");
 661     method->print_name(&lsh);
 662     lsh.cr();
 663   }
 664   */
 665   if (ForceProfiling && TrainingData::need_data()) {
 666     MethodTrainingData* mtd = MethodTrainingData::make(method, false);
 667     guarantee(mtd != nullptr, "");
 668   }
 669   if (PrintMethodData) {
 670     ResourceMark rm(THREAD);
 671     tty->print("build_profiling_method_data for ");
 672     method->print_name(tty);
 673     tty->cr();
 674     // At the end of the run, the MDO, full of data, will be dumped.
 675   }
 676 }
 677 
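// Lazily allocate and install MethodCounters for m. If another thread wins the race,
// the freshly allocated counters are freed and the winner's are returned; returns
// null if metaspace has been exhausted.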
 678 MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
 679   // Do not profile the method if metaspace has hit an OOM previously
 680   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 681     return nullptr;
 682   }
 683 
 684   methodHandle mh(current, m);
 685   MethodCounters* counters;
 686   if (current->is_Java_thread()) {
 687     JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
 688     // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
 689     // if needed.
 690     counters = MethodCounters::allocate_with_exception(mh, THREAD);
 691     if (HAS_PENDING_EXCEPTION) {
 692       CLEAR_PENDING_EXCEPTION;
 693     }
 694   } else {
 695     // Call metaspace allocation that doesn't throw exception if the
 696     // current thread isn't a JavaThread, ie. the VMThread.
 697     counters = MethodCounters::allocate_no_exception(mh);
 698   }
 699 
 700   if (counters == nullptr) {
 701     CompileBroker::log_metaspace_failure();
 702     ClassLoaderDataGraph::set_metaspace_oom(true);
 703     return nullptr;
 704   }
 705 
 706   if (!mh->init_method_counters(counters)) {
 707     MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
 708   }
 709 
 710   if (ForceProfiling && TrainingData::need_data()) {
 711     MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
 712     guarantee(mtd != nullptr, "");
 713   }
 714 
 715   return mh->method_counters();
 716 }
 717 
 718 bool Method::init_method_counters(MethodCounters* counters) {
 719   // Try to install a pointer to MethodCounters, return true on success.
 720   return Atomic::replace_if_null(&_method_counters, counters);
 721 }
 722 
 723 void Method::set_exception_handler_entered(int handler_bci) {
 724   if (ProfileExceptionHandlers) {
 725     MethodData* mdo = method_data();
 726     if (mdo != nullptr) {
 727       BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
 728       handler_data.set_exception_handler_entered();
 729     }
 730   }
 731 }
 732 
 733 int Method::extra_stack_words() {
 734   // not an inline function, to avoid a header dependency on Interpreter
 735   return extra_stack_entries() * Interpreter::stackElementSize;
 736 }
 737 
 738 bool Method::is_vanilla_constructor() const {
 739   // Returns true if this method is a vanilla constructor, i.e. an "<init>" "()V" method
 740   // which only calls the superclass vanilla constructor and possibly does stores of
 741   // zero constants to local fields:
 742   //
 743   //   aload_0
 744   //   invokespecial
 745   //   indexbyte1
 746   //   indexbyte2
 747   //
 748   // followed by an (optional) sequence of:
 749   //
 750   //   aload_0
 751   //   aconst_null / iconst_0 / fconst_0 / dconst_0
 752   //   putfield
 753   //   indexbyte1
 754   //   indexbyte2
 755   //
 756   // followed by:
 757   //
 758   //   return
 759 
 760   assert(name() == vmSymbols::object_initializer_name(),    "Should only be called for default constructors");
 761   assert(signature() == vmSymbols::void_method_signature(), "Should only be called for default constructors");
 762   int size = code_size();
 763   // Check the size: the 4-byte super constructor call, any 5-byte field stores,
 763   // and the final return always total a non-zero multiple of 5
 764   if (size == 0 || size % 5 != 0) return false;
 765   address cb = code_base();
 766   int last = size - 1;
 767   if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
 768     // Does not call superclass default constructor
 769     return false;
 770   }
 771   // Check optional sequence
 772   for (int i = 4; i < last; i += 5) {
 773     if (cb[i] != Bytecodes::_aload_0) return false;
 774     if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;
 775     if (cb[i+2] != Bytecodes::_putfield) return false;
 776   }
 777   return true;
 778 }
 779 
 780 
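// Scan the bytecodes once for any backward branch (including negative goto/jsr and
// switch offsets) and cache the result in the has_loops flag.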
 781 bool Method::compute_has_loops_flag() {
 782   BytecodeStream bcs(methodHandle(Thread::current(), this));
 783   Bytecodes::Code bc;
 784 
 785   while ((bc = bcs.next()) >= 0) {
 786     switch (bc) {
 787       case Bytecodes::_ifeq:
 788       case Bytecodes::_ifnull:
 789       case Bytecodes::_iflt:
 790       case Bytecodes::_ifle:
 791       case Bytecodes::_ifne:
 792       case Bytecodes::_ifnonnull:
 793       case Bytecodes::_ifgt:
 794       case Bytecodes::_ifge:
 795       case Bytecodes::_if_icmpeq:
 796       case Bytecodes::_if_icmpne:
 797       case Bytecodes::_if_icmplt:
 798       case Bytecodes::_if_icmpgt:
 799       case Bytecodes::_if_icmple:
 800       case Bytecodes::_if_icmpge:
 801       case Bytecodes::_if_acmpeq:
 802       case Bytecodes::_if_acmpne:
 803       case Bytecodes::_goto:
 804       case Bytecodes::_jsr:
 805         if (bcs.dest() < bcs.next_bci()) {
 806           return set_has_loops();
 807         }
 808         break;
 809 
 810       case Bytecodes::_goto_w:
 811       case Bytecodes::_jsr_w:
 812         if (bcs.dest_w() < bcs.next_bci()) {
 813           return set_has_loops();
 814         }
 815         break;
 816 
 817       case Bytecodes::_lookupswitch: {
 818         Bytecode_lookupswitch lookupswitch(this, bcs.bcp());
 819         if (lookupswitch.default_offset() < 0) {
 820           return set_has_loops();
 821         } else {
 822           for (int i = 0; i < lookupswitch.number_of_pairs(); ++i) {
 823             LookupswitchPair pair = lookupswitch.pair_at(i);
 824             if (pair.offset() < 0) {
 825               return set_has_loops();
 826             }
 827           }
 828         }
 829         break;
 830       }
 831       case Bytecodes::_tableswitch: {
 832         Bytecode_tableswitch tableswitch(this, bcs.bcp());
 833         if (tableswitch.default_offset() < 0) {
 834           return set_has_loops();
 835         } else {
 836           for (int i = 0; i < tableswitch.length(); ++i) {
 837             if (tableswitch.dest_offset_at(i) < 0) {
 838               return set_has_loops();
 839             }
 840           }
 841         }
 842         break;
 843       }
 844       default:
 845         break;
 846     }
 847   }
 848 
 849   _flags.set_has_loops_flag_init(true);
 850   return false;
 851 }
 852 
 853 bool Method::is_final_method(AccessFlags class_access_flags) const {
 854   // or "does_not_require_vtable_entry"
 855   // A default method or an overpass is not final (it reuses a vtable entry).
 856   // Private methods in classes get vtable entries for backward class compatibility.
 857   if (is_overpass() || is_default_method())  return false;
 858   return is_final() || class_access_flags.is_final();
 859 }
 860 
 861 bool Method::is_final_method() const {
 862   return is_final_method(method_holder()->access_flags());
 863 }
 864 
 865 bool Method::is_default_method() const {
 866   if (method_holder() != nullptr &&
 867       method_holder()->is_interface() &&
 868       !is_abstract() && !is_private()) {
 869     return true;
 870   } else {
 871     return false;
 872   }
 873 }
 874 
 875 bool Method::can_be_statically_bound(AccessFlags class_access_flags) const {
 876   if (is_final_method(class_access_flags))  return true;
 877 #ifdef ASSERT
 878   bool is_nonv = (vtable_index() == nonvirtual_vtable_index);
 879   if (class_access_flags.is_interface()) {
 880       ResourceMark rm;
 881       assert(is_nonv == is_static() || is_nonv == is_private(),
 882              "nonvirtual unexpected for non-static, non-private: %s",
 883              name_and_sig_as_C_string());
 884   }
 885 #endif
 886   assert(valid_vtable_index() || valid_itable_index(), "method must be linked before we ask this question");
 887   return vtable_index() == nonvirtual_vtable_index;
 888 }
 889 
 890 bool Method::can_be_statically_bound() const {
 891   return can_be_statically_bound(method_holder()->access_flags());
 892 }
 893 
 894 bool Method::can_be_statically_bound(InstanceKlass* context) const {
 895   return (method_holder() == context) && can_be_statically_bound();
 896 }
 897 
 898 /**
 899  *  Returns false if this is one of the specially treated methods for
 900  *  which we have to provide a stack trace when throwing in compiled code.
 901  *  Returns true otherwise.
 902  */
 903 bool Method::can_omit_stack_trace() {
 904   if (klass_name() == vmSymbols::sun_invoke_util_ValueConversions()) {
 905     return false; // All methods in sun.invoke.util.ValueConversions
 906   }
 907   return true;
 908 }
 909 
 910 bool Method::is_accessor() const {
 911   return is_getter() || is_setter();
 912 }
 913 
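// A getter is exactly: aload_0; getfield <index>; <t>return -- five bytes of bytecode
// with the receiver as the only parameter.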
 914 bool Method::is_getter() const {
 915   if (code_size() != 5) return false;
 916   if (size_of_parameters() != 1) return false;
 917   if (java_code_at(0) != Bytecodes::_aload_0)  return false;
 918   if (java_code_at(1) != Bytecodes::_getfield) return false;
 919   switch (java_code_at(4)) {
 920     case Bytecodes::_ireturn:
 921     case Bytecodes::_lreturn:
 922     case Bytecodes::_freturn:
 923     case Bytecodes::_dreturn:
 924     case Bytecodes::_areturn:
 925       break;
 926     default:
 927       return false;
 928   }
 929   return true;
 930 }
 931 
 932 bool Method::is_setter() const {
 933   if (code_size() != 6) return false;
 934   if (java_code_at(0) != Bytecodes::_aload_0) return false;
 935   switch (java_code_at(1)) {
 936     case Bytecodes::_iload_1:
 937     case Bytecodes::_aload_1:
 938     case Bytecodes::_fload_1:
 939       if (size_of_parameters() != 2) return false;
 940       break;
 941     case Bytecodes::_dload_1:
 942     case Bytecodes::_lload_1:
 943       if (size_of_parameters() != 3) return false;
 944       break;
 945     default:
 946       return false;
 947   }
 948   if (java_code_at(2) != Bytecodes::_putfield) return false;
 949   if (java_code_at(5) != Bytecodes::_return)   return false;
 950   return true;
 951 }
 952 
 953 bool Method::is_constant_getter() const {
 954   int last_index = code_size() - 1;
 955   // Check if the first 1-3 bytecodes are a constant push
 956   // and the last bytecode is a return.
 957   return (2 <= code_size() && code_size() <= 4 &&
 958           Bytecodes::is_const(java_code_at(0)) &&
 959           Bytecodes::length_for(java_code_at(0)) == last_index &&
 960           Bytecodes::is_return(java_code_at(last_index)));
 961 }
 962 
 963 bool Method::is_initializer() const {
 964   return is_object_initializer() || is_static_initializer();
 965 }
 966 
 967 bool Method::has_valid_initializer_flags() const {
 968   return (is_static() ||
 969           method_holder()->major_version() < 51);
 970 }
 971 
 972 bool Method::is_static_initializer() const {
 973   // For classfiles version 51 or greater, ensure that the clinit method is
 974   // static.  Non-static methods with the name "<clinit>" are not static
 975   // initializers. (older classfiles exempted for backward compatibility)
 976   return name() == vmSymbols::class_initializer_name() &&
 977          has_valid_initializer_flags();
 978 }
 979 
 980 bool Method::is_object_initializer() const {
 981    return name() == vmSymbols::object_initializer_name();
 982 }
 983 
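// Calls to a static method need a class-initialization barrier until the holder
// class has finished initializing.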
 984 bool Method::needs_clinit_barrier() const {
 985   return is_static() && !method_holder()->is_initialized();
 986 }
 987 
 988 bool Method::code_has_clinit_barriers() const {
 989   CompiledMethod* nm = code();
 990   return (nm != nullptr) && nm->has_clinit_barriers();
 991 }
 992 
 993 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
 994   int length = method->checked_exceptions_length();
 995   if (length == 0) {  // common case
 996     return objArrayHandle(THREAD, Universe::the_empty_class_array());
 997   } else {
 998     methodHandle h_this(THREAD, method);
 999     objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
1000     objArrayHandle mirrors (THREAD, m_oop);
1001     for (int i = 0; i < length; i++) {
1002       CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
1003       Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
1004       if (log_is_enabled(Warning, exceptions) &&
1005           !k->is_subclass_of(vmClasses::Throwable_klass())) {
1006         ResourceMark rm(THREAD);
1007         log_warning(exceptions)(
1008           "Class %s in throws clause of method %s is not a subtype of class java.lang.Throwable",
1009           k->external_name(), method->external_name());
1010       }
1011       mirrors->obj_at_put(i, k->java_mirror());
1012     }
1013     return mirrors;
1014   }
1015 };
1016 
1017 
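// Map a bci to a source line by returning an exact match if one exists, otherwise
// the line of the entry with the largest start_pc below the bci.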
1018 int Method::line_number_from_bci(int bci) const {
1019   int best_bci  =  0;
1020   int best_line = -1;
1021   if (bci == SynchronizationEntryBCI) bci = 0;
1022   if (0 <= bci && bci < code_size() && has_linenumber_table()) {
1023     // The line numbers are a short array of 2-tuples [start_pc, line_number].
1024     // Not necessarily sorted and not necessarily one-to-one.
1025     CompressedLineNumberReadStream stream(compressed_linenumber_table());
1026     while (stream.read_pair()) {
1027       if (stream.bci() == bci) {
1028         // perfect match
1029         return stream.line();
1030       } else {
1031         // update best_bci/line
1032         if (stream.bci() < bci && stream.bci() >= best_bci) {
1033           best_bci  = stream.bci();
1034           best_line = stream.line();
1035         }
1036       }
1037     }
1038   }
1039   return best_line;
1040 }
1041 
1042 
1043 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
1044   if (constants()->tag_at(klass_index).is_unresolved_klass()) {
1045     Thread *thread = Thread::current();
1046     Symbol* klass_name = constants()->klass_name_at(klass_index);
1047     Handle loader(thread, method_holder()->class_loader());
1048     Handle prot  (thread, method_holder()->protection_domain());
1049     return SystemDictionary::find_instance_klass(thread, klass_name, loader, prot) != nullptr;
1050   } else {
1051     return true;
1052   }
1053 }
1054 
1055 
1056 bool Method::is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved) const {
1057   int klass_index = constants()->klass_ref_index_at(refinfo_index, bc);
1058   if (must_be_resolved) {
1059     // Make sure klass is resolved in constantpool.
1060     if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
1061   }
1062   return is_klass_loaded_by_klass_index(klass_index);
1063 }
1064 
1065 
1066 void Method::set_native_function(address function, bool post_event_flag) {
1067   assert(function != nullptr, "use clear_native_function to unregister natives");
1068   assert(!is_special_native_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
1069   address* native_function = native_function_addr();
1070 
1071   // We can see racers trying to install the same native function; installing it
1072   // once is plenty.
1073   address current = *native_function;
1074   if (current == function) return;
1075   if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
1076       function != nullptr) {
1077     // native_method_throw_unsatisfied_link_error_entry() should only
1078     // be passed when post_event_flag is false.
1079     assert(function !=
1080       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1081       "post_event_flag mismatch");
1082 
1083     // post the bind event, and possibly change the bind function
1084     JvmtiExport::post_native_method_bind(this, &function);
1085   }
1086   *native_function = function;
1087   // This function can be called more than once. We must make sure that we always
1088   // use the latest registered method -> check if a stub already has been generated.
1089   // If so, we have to make it not_entrant.
1090   CompiledMethod* nm = code(); // Put it into local variable to guard against concurrent updates
1091   if (nm != nullptr) {
1092     nm->make_not_entrant();
1093   }
1094 }
1095 
1096 
1097 bool Method::has_native_function() const {
1098   if (is_special_native_intrinsic())
1099     return false;  // special-cased in SharedRuntime::generate_native_wrapper
1100   address func = native_function();
1101   return (func != nullptr && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
1102 }
1103 
1104 
1105 void Method::clear_native_function() {
1106   // Note: is_method_handle_intrinsic() is allowed here.
1107   set_native_function(
1108     SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1109     !native_bind_event_is_interesting);
1110   this->unlink_code();
1111 }
1112 
1113 
1114 void Method::set_signature_handler(address handler) {
1115   address* signature_handler =  signature_handler_addr();
1116   *signature_handler = handler;
1117 }
1118 
1119 
1120 void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {
1121   assert(reason != nullptr, "must provide a reason");
1122   if (PrintCompilation && report) {
1123     ttyLocker ttyl;
1124     tty->print("made not %scompilable on ", is_osr ? "OSR " : "");
1125     if (comp_level == CompLevel_all) {
1126       tty->print("all levels ");
1127     } else {
1128       tty->print("level %d ", comp_level);
1129     }
1130     this->print_short_name(tty);
1131     int size = this->code_size();
1132     if (size > 0) {
1133       tty->print(" (%d bytes)", size);
1134     }
1135     if (reason != nullptr) {
1136       tty->print("   %s", reason);
1137     }
1138     tty->cr();
1139   }
1140   if ((TraceDeoptimization || LogCompilation) && (xtty != nullptr)) {
1141     ttyLocker ttyl;
1142     xtty->begin_elem("make_not_compilable thread='" UINTX_FORMAT "' osr='%d' level='%d'",
1143                      os::current_thread_id(), is_osr, comp_level);
1144     if (reason != nullptr) {
1145       xtty->print(" reason=\'%s\'", reason);
1146     }
1147     xtty->method(this);
1148     xtty->stamp();
1149     xtty->end_elem();
1150   }
1151 }
1152 
1153 bool Method::is_always_compilable() const {
1154   // Generated adapters must be compiled
1155   if (is_special_native_intrinsic() && is_synthetic()) {
1156     assert(!is_not_c1_compilable(), "sanity check");
1157     assert(!is_not_c2_compilable(), "sanity check");
1158     return true;
1159   }
1160 
1161   return false;
1162 }
1163 
1164 bool Method::is_not_compilable(int comp_level) const {
1165   if (number_of_breakpoints() > 0)
1166     return true;
1167   if (is_always_compilable())
1168     return false;
1169   if (comp_level == CompLevel_any)
1170     return is_not_c1_compilable() && is_not_c2_compilable();
1171   if (is_c1_compile(comp_level))
1172     return is_not_c1_compilable();
1173   if (is_c2_compile(comp_level))
1174     return is_not_c2_compilable();
1175   return false;
1176 }
1177 
1178 // call this when compiler finds that this method is not compilable
1179 void Method::set_not_compilable(const char* reason, int comp_level, bool report) {
1180   if (is_always_compilable()) {
1181     // Don't mark a method which should be always compilable
1182     return;
1183   }
1184   print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason);
1185   if (comp_level == CompLevel_all) {
1186     set_is_not_c1_compilable();
1187     set_is_not_c2_compilable();
1188   } else {
1189     if (is_c1_compile(comp_level))
1190       set_is_not_c1_compilable();
1191     if (is_c2_compile(comp_level))
1192       set_is_not_c2_compilable();
1193   }
1194   assert(!CompilationPolicy::can_be_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
1195 }
1196 
1197 bool Method::is_not_osr_compilable(int comp_level) const {
1198   if (is_not_compilable(comp_level))
1199     return true;
1200   if (comp_level == CompLevel_any)
1201     return is_not_c1_osr_compilable() && is_not_c2_osr_compilable();
1202   if (is_c1_compile(comp_level))
1203     return is_not_c1_osr_compilable();
1204   if (is_c2_compile(comp_level))
1205     return is_not_c2_osr_compilable();
1206   return false;
1207 }
1208 
1209 void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) {
1210   print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
1211   if (comp_level == CompLevel_all) {
1212     set_is_not_c1_osr_compilable();
1213     set_is_not_c2_osr_compilable();
1214   } else {
1215     if (is_c1_compile(comp_level))
1216       set_is_not_c1_osr_compilable();
1217     if (is_c2_compile(comp_level))
1218       set_is_not_c2_osr_compilable();
1219   }
1220   assert(!CompilationPolicy::can_be_osr_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
1221 }
1222 
1223 // Revert to using the interpreter and clear out the nmethod
1224 void Method::clear_code() {
1225   // The adapter may be null if c2i adapters have not been made yet;
1226   // this should only happen at allocation time.
1227   if (adapter() == nullptr) {
1228     _from_compiled_entry    = nullptr;
1229   } else {
1230     _from_compiled_entry    = adapter()->get_c2i_entry();
1231   }
1232   OrderAccess::storestore();
1233   _from_interpreted_entry = _i2i_entry;
1234   OrderAccess::storestore();
1235   _code = nullptr;
1236 }
1237 
1238 void Method::unlink_code(CompiledMethod *compare) {
1239   ConditionalMutexLocker ml(CompiledMethod_lock, !CompiledMethod_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
1240   // We need to check if either the _code or _from_compiled_code_entry_point
1241   // refer to this nmethod because there is a race in setting these two fields
1242   // in Method* as seen in bugid 4947125.
1243   if (code() == compare ||
1244       from_compiled_entry() == compare->verified_entry_point()) {
1245     clear_code();
1246   }
1247 }
1248 
1249 void Method::unlink_code() {
1250   ConditionalMutexLocker ml(CompiledMethod_lock, !CompiledMethod_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
1251   clear_code();
1252 }
1253 
1254 #if INCLUDE_CDS
1255 // Called by class data sharing to remove any entry points (which are not shared)
1256 void Method::unlink_method() {
1257   assert(CDSConfig::is_dumping_archive(), "sanity");
1258   _code = nullptr;
1259   _adapter = nullptr;
1260   _i2i_entry = nullptr;
1261   _from_compiled_entry = nullptr;
1262   _from_interpreted_entry = nullptr;
1263 
1264   if (is_native()) {
1265     *native_function_addr() = nullptr;
1266     set_signature_handler(nullptr);
1267   }
1268   NOT_PRODUCT(set_compiled_invocation_count(0);)
1269 
1270   clear_method_data();
1271   clear_method_counters();
1272   clear_is_not_c1_compilable();
1273   clear_is_not_c1_osr_compilable();
1274   clear_is_not_c2_compilable();
1275   clear_is_not_c2_osr_compilable();
1276   clear_queued_for_compilation();
1277   set_pending_queue_processed(false);
1278   remove_unshareable_flags();
1279 }
1280 
1281 void Method::remove_unshareable_flags() {
1282   // clear all the flags that shouldn't be in the archived version
1283   assert(!is_old(), "must be");
1284   assert(!is_obsolete(), "must be");
1285   assert(!is_deleted(), "must be");
1286 
1287   set_is_prefixed_native(false);
1288   set_queued_for_compilation(false);
1289   set_pending_queue_processed(false);
1290   set_is_not_c2_compilable(false);
1291   set_is_not_c1_compilable(false);
1292   set_is_not_c2_osr_compilable(false);
1293   set_on_stack_flag(false);
1294 }
1295 #endif
1296 
1297 // Called when the method_holder is getting linked. Setup entrypoints so the method
1298 // is ready to be called from interpreter, compiler, and vtables.
1299 void Method::link_method(const methodHandle& h_method, TRAPS) {
1300   if (UsePerfData) {
1301     ClassLoader::perf_ik_link_methods_count()->inc();
1302   }
1303 
1304   // If the code cache is full, we may reenter this function for the
1305   // leftover methods that weren't linked.
1306   if (adapter() != nullptr) {
1307     return;
1308   }
1309   assert( _code == nullptr, "nothing compiled yet" );
1310 
1311   // Setup interpreter entrypoint
1312   assert(this == h_method(), "wrong h_method()" );
1313 
1314   assert(adapter() == nullptr, "init'd to null");
1315   address entry = Interpreter::entry_for_method(h_method);
1316   assert(entry != nullptr, "interpreter entry must be non-null");
1317   // Sets both _i2i_entry and _from_interpreted_entry
1318   set_interpreter_entry(entry);
1319 
1320   // Don't overwrite already registered native entries.
1321   if (is_native() && !has_native_function()) {
1322     set_native_function(
1323       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1324       !native_bind_event_is_interesting);
1325   }
1326 
1327   // Setup compiler entrypoint.  This is made eagerly, so we do not need
1328   // special handling of vtables.  An alternative is to make adapters more
1329   // lazily by calling make_adapter() from from_compiled_entry() for the
1330   // normal calls.  For vtable calls life gets more complicated.  When a
1331   // call-site goes mega-morphic we need adapters in all methods which can be
1332   // called from the vtable.  We need adapters on such methods that get loaded
1333   // later.  Ditto for mega-morphic itable calls.  If this proves to be a
1334   // problem we'll make these lazily later.
1335   (void) make_adapters(h_method, CHECK);
1336 
1337   // ONLY USE the h_method now as make_adapter may have blocked
1338 
1339   if (h_method->is_continuation_native_intrinsic()) {
1340     _from_interpreted_entry = nullptr;
1341     _from_compiled_entry = nullptr;
1342     _i2i_entry = nullptr;
1343     if (Continuations::enabled()) {
1344       assert(!Threads::is_vm_complete(), "should only be called during vm init");
1345       AdapterHandlerLibrary::create_native_wrapper(h_method);
1346       if (!h_method->has_compiled_code()) {
1347         THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1348       }
1349       assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1350     }
1351   }
1352   if (_preload_code != nullptr) {
1353     MutexLocker ml(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
1354     set_code(h_method, _preload_code);
1355     assert(((nmethod*)_preload_code)->scc_entry() == _scc_entry, "sanity");
1356   }
1357 }
1358 
1359 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1360   PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());
1361 
1362   // Adapters for compiled code are made eagerly here.  They are fairly
1363   // small (generally < 100 bytes) and quick to make (and cached and shared)
1364   // so making them eagerly shouldn't be too expensive.
1365   AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1366   if (adapter == nullptr ) {
1367     if (!is_init_completed()) {
1368       // Don't throw exceptions during VM initialization because java.lang.* classes
1369       // might not have been initialized, causing problems when constructing the
1370       // Java exception object.
1371       vm_exit_during_initialization("Out of space in CodeCache for adapters");
1372     } else {
1373       THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "Out of space in CodeCache for adapters");
1374     }
1375   }
1376 
1377   mh->set_adapter_entry(adapter);
1378   mh->_from_compiled_entry = adapter->get_c2i_entry();
1379   return adapter->get_c2i_entry();
1380 }
1381 
1382 // The verified_code_entry() must be called when an invoke is resolved
1383 // on this method.
1384 
1385 // It returns the compiled code entry point, after asserting not null.
1386 // This function is called after potential safepoints so that nmethod
1387 // or adapter that it points to is still live and valid.
1388 // This function must not hit a safepoint!
1389 address Method::verified_code_entry() {
1390   debug_only(NoSafepointVerifier nsv;)
1391   assert(_from_compiled_entry != nullptr, "must be set");
1392   return _from_compiled_entry;
1393 }
1394 
1395 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
1396 // (could be racing a deopt).
1397 // Not inline to avoid circular ref.
1398 bool Method::check_code() const {
  // There's a race on the value of the field; it may also be cached in a register
  // or local by the caller.
1400   CompiledMethod *code = Atomic::load_acquire(&_code);
1401   return code == nullptr || (code->method() == nullptr) || (code->method() == (Method*)this && !code->is_osr_method());
1402 }
1403 
// Install compiled code.  Once installed, it can execute immediately.
1405 void Method::set_code(const methodHandle& mh, CompiledMethod *code) {
1406   assert_lock_strong(CompiledMethod_lock);
1407   assert( code, "use clear_code to remove code" );
1408   assert( mh->check_code(), "" );
1409 
1410   guarantee(mh->adapter() != nullptr, "Adapter blob must already exist!");
1411 
  // These writes must happen in this order, because the interpreter will
  // directly jump to from_interpreted_entry which jumps to an i2c adapter
  // which jumps to _from_compiled_entry.  The storestore barriers below
  // enforce that ordering.
1415   mh->_code = code;             // Assign before allowing compiled code to exec
1416 
1417   int comp_level = code->comp_level();
1418   // In theory there could be a race here. In practice it is unlikely
1419   // and not worth worrying about.
1420   if (comp_level > mh->highest_comp_level()) {
1421     mh->set_highest_comp_level(comp_level);
1422   }
1423 
1424   OrderAccess::storestore();
1425   mh->_from_compiled_entry = code->verified_entry_point();
1426   OrderAccess::storestore();
1427 
1428   if (mh->is_continuation_native_intrinsic()) {
1429     assert(mh->_from_interpreted_entry == nullptr, "initialized incorrectly"); // see link_method
1430 
1431     if (mh->is_continuation_enter_intrinsic()) {
1432       // This is the entry used when we're in interpreter-only mode; see InterpreterMacroAssembler::jump_from_interpreted
1433       mh->_i2i_entry = ContinuationEntry::interpreted_entry();
1434     } else if (mh->is_continuation_yield_intrinsic()) {
1435       mh->_i2i_entry = mh->get_i2c_entry();
1436     } else {
1437       guarantee(false, "Unknown Continuation native intrinsic");
1438     }
1439     // This must come last, as it is what's tested in LinkResolver::resolve_static_call
    Atomic::release_store(&mh->_from_interpreted_entry, mh->get_i2c_entry());
1441   } else if (!mh->is_method_handle_intrinsic()) {
    // Once this entry is set, compiled code can execute.
1443     mh->_from_interpreted_entry = mh->get_i2c_entry();
1444   }
1445 }
1446 
1447 
1448 bool Method::is_overridden_in(Klass* k) const {
1449   InstanceKlass* ik = InstanceKlass::cast(k);
1450 
1451   if (ik->is_interface()) return false;
1452 
  // If the method's holder is an interface, we skip it - except if it
  // is a miranda method
1455   if (method_holder()->is_interface()) {
1456     // Check that method is not a miranda method
1457     if (ik->lookup_method(name(), signature()) == nullptr) {
      // No implementation exists - so it is a miranda method
1459       return false;
1460     }
1461     return true;
1462   }
1463 
1464   assert(ik->is_subclass_of(method_holder()), "should be subklass");
1465   if (!has_vtable_index()) {
1466     return false;
1467   } else {
1468     Method* vt_m = ik->method_at_vtable(vtable_index());
1469     return vt_m != this;
1470   }
1471 }
1472 
1473 
1474 // give advice about whether this Method* should be cached or not
1475 bool Method::should_not_be_cached() const {
1476   if (is_old()) {
1477     // This method has been redefined. It is either EMCP or obsolete
1478     // and we don't want to cache it because that would pin the method
1479     // down and prevent it from being collectible if and when it
1480     // finishes executing.
1481     return true;
1482   }
1483 
1484   // caching this method should be just fine
1485   return false;
1486 }
1487 
1488 
1489 /**
1490  *  Returns true if this is one of the specially treated methods for
1491  *  security related stack walks (like Reflection.getCallerClass).
1492  */
1493 bool Method::is_ignored_by_security_stack_walk() const {
1494   if (intrinsic_id() == vmIntrinsics::_invoke) {
1495     // This is Method.invoke() -- ignore it
1496     return true;
1497   }
1498   if (method_holder()->is_subclass_of(vmClasses::reflect_MethodAccessorImpl_klass())) {
1499     // This is an auxiliary frame -- ignore it
1500     return true;
1501   }
1502   if (is_method_handle_intrinsic() || is_compiled_lambda_form()) {
1503     // This is an internal adapter frame for method handles -- ignore it
1504     return true;
1505   }
1506   return false;
1507 }
1508 
1509 
1510 // Constant pool structure for invoke methods:
1511 enum {
1512   _imcp_invoke_name = 1,        // utf8: 'invokeExact', etc.
1513   _imcp_invoke_signature,       // utf8: (variable Symbol*)
1514   _imcp_limit
1515 };
1516 
1517 // Test if this method is an MH adapter frame generated by Java code.
1518 // Cf. java/lang/invoke/InvokerBytecodeGenerator
1519 bool Method::is_compiled_lambda_form() const {
1520   return intrinsic_id() == vmIntrinsics::_compiledLambdaForm;
1521 }
1522 
1523 // Test if this method is an internal MH primitive method.
1524 bool Method::is_method_handle_intrinsic() const {
1525   vmIntrinsics::ID iid = intrinsic_id();
1526   return (MethodHandles::is_signature_polymorphic(iid) &&
1527           MethodHandles::is_signature_polymorphic_intrinsic(iid));
1528 }
1529 
1530 bool Method::has_member_arg() const {
1531   vmIntrinsics::ID iid = intrinsic_id();
1532   return (MethodHandles::is_signature_polymorphic(iid) &&
1533           MethodHandles::has_member_arg(iid));
1534 }
1535 
1536 // Make an instance of a signature-polymorphic internal MH primitive.
1537 methodHandle Method::make_method_handle_intrinsic(vmIntrinsics::ID iid,
1538                                                          Symbol* signature,
1539                                                          TRAPS) {
1540   ResourceMark rm(THREAD);
1541   methodHandle empty;
1542 
1543   InstanceKlass* holder = vmClasses::MethodHandle_klass();
1544   Symbol* name = MethodHandles::signature_polymorphic_intrinsic_name(iid);
1545   assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");
1546 
1547   log_info(methodhandles)("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());
1548 
1549   // invariant:   cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
1550   name->increment_refcount();
1551   signature->increment_refcount();
1552 
1553   int cp_length = _imcp_limit;
1554   ClassLoaderData* loader_data = holder->class_loader_data();
1555   constantPoolHandle cp;
1556   {
1557     ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
1558     cp = constantPoolHandle(THREAD, cp_oop);
1559   }
1560   cp->copy_fields(holder->constants());
1561   cp->set_pool_holder(holder);
1562   cp->symbol_at_put(_imcp_invoke_name,       name);
1563   cp->symbol_at_put(_imcp_invoke_signature,  signature);
1564   cp->set_has_preresolution();
1565   cp->set_is_for_method_handle_intrinsic();
1566 
1567   // decide on access bits:  public or not?
1568   int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
1569   bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
1570   if (must_be_static)  flags_bits |= JVM_ACC_STATIC;
1571   assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");
1572 
1573   methodHandle m;
1574   {
1575     InlineTableSizes sizes;
1576     Method* m_oop = Method::allocate(loader_data, 0,
1577                                      accessFlags_from(flags_bits), &sizes,
1578                                      ConstMethod::NORMAL,
1579                                      name,
1580                                      CHECK_(empty));
1581     m = methodHandle(THREAD, m_oop);
1582   }
1583   m->set_constants(cp());
1584   m->set_name_index(_imcp_invoke_name);
1585   m->set_signature_index(_imcp_invoke_signature);
1586   assert(MethodHandles::is_signature_polymorphic_name(m->name()), "");
1587   assert(m->signature() == signature, "");
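  // Derive signature-dependent metadata (e.g. the size of parameters) for the new method.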
1588   m->constMethod()->compute_from_signature(signature, must_be_static);
1589   m->init_intrinsic_id(klass_id_for_intrinsics(m->method_holder()));
1590   assert(m->is_method_handle_intrinsic(), "");
1591 #ifdef ASSERT
1592   if (!MethodHandles::is_signature_polymorphic(m->intrinsic_id()))  m->print();
1593   assert(MethodHandles::is_signature_polymorphic(m->intrinsic_id()), "must be an invoker");
1594   assert(m->intrinsic_id() == iid, "correctly predicted iid");
1595 #endif //ASSERT
1596 
1597   // Finally, set up its entry points.
1598   assert(m->can_be_statically_bound(), "");
1599   m->set_vtable_index(Method::nonvirtual_vtable_index);
1600   m->link_method(m, CHECK_(empty));
1601 
1602   if (iid == vmIntrinsics::_linkToNative) {
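    // There is no interpreted implementation of this intrinsic; point the interpreter
    // entries at the i2c adapter so calls go straight to compiled code.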
1603     m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1604   }
1605   if (log_is_enabled(Debug, methodhandles)) {
1606     LogTarget(Debug, methodhandles) lt;
1607     LogStream ls(lt);
1608     m->print_on(&ls);
1609   }
1610 
1611   return m;
1612 }
1613 
1614 #if INCLUDE_CDS
1615 void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
1616   m->link_method(m, CHECK);
1617 
1618   if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
1619     m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1620   }
1621 }
1622 #endif
1623 
1624 Klass* Method::check_non_bcp_klass(Klass* klass) {
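  // Non-BCP means the klass was defined by a non-boot loader; for object arrays,
  // report the bottom element klass instead.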
1625   if (klass != nullptr && klass->class_loader() != nullptr) {
1626     if (klass->is_objArray_klass())
1627       klass = ObjArrayKlass::cast(klass)->bottom_klass();
1628     return klass;
1629   }
1630   return nullptr;
1631 }
1632 
1633 
1634 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
1635                                                 u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
1636   // Code below does not work for native methods - they should never get rewritten anyway
1637   assert(!m->is_native(), "cannot rewrite native methods");
1638   // Allocate new Method*
1639   AccessFlags flags = m->access_flags();
1640 
1641   ConstMethod* cm = m->constMethod();
1642   int checked_exceptions_len = cm->checked_exceptions_length();
1643   int localvariable_len = cm->localvariable_table_length();
1644   int exception_table_len = cm->exception_table_length();
1645   int method_parameters_len = cm->method_parameters_length();
1646   int method_annotations_len = cm->method_annotations_length();
1647   int parameter_annotations_len = cm->parameter_annotations_length();
1648   int type_annotations_len = cm->type_annotations_length();
1649   int default_annotations_len = cm->default_annotations_length();
1650 
1651   InlineTableSizes sizes(
1652       localvariable_len,
1653       new_compressed_linenumber_size,
1654       exception_table_len,
1655       checked_exceptions_len,
1656       method_parameters_len,
1657       cm->generic_signature_index(),
1658       method_annotations_len,
1659       parameter_annotations_len,
1660       type_annotations_len,
1661       default_annotations_len,
1662       0);
1663 
1664   ClassLoaderData* loader_data = m->method_holder()->class_loader_data();
1665   Method* newm_oop = Method::allocate(loader_data,
1666                                       new_code_length,
1667                                       flags,
1668                                       &sizes,
1669                                       m->method_type(),
1670                                       m->name(),
1671                                       CHECK_(methodHandle()));
1672   methodHandle newm (THREAD, newm_oop);
1673 
  // Create a shallow copy of the Method part, but be careful to preserve the new ConstMethod*
1675   ConstMethod* newcm = newm->constMethod();
1676   int new_const_method_size = newm->constMethod()->size();
1677 
1678   // This works because the source and target are both Methods. Some compilers
1679   // (e.g., clang) complain that the target vtable pointer will be stomped,
1680   // so cast away newm()'s and m()'s Methodness.
1681   memcpy((void*)newm(), (void*)m(), sizeof(Method));
1682 
1683   // Create shallow copy of ConstMethod.
1684   memcpy(newcm, m->constMethod(), sizeof(ConstMethod));
1685 
1686   // Reset correct method/const method, method size, and parameter info
1687   newm->set_constMethod(newcm);
1688   newm->constMethod()->set_code_size(new_code_length);
1689   newm->constMethod()->set_constMethod_size(new_const_method_size);
1690   assert(newm->code_size() == new_code_length, "check");
1691   assert(newm->method_parameters_length() == method_parameters_len, "check");
1692   assert(newm->checked_exceptions_length() == checked_exceptions_len, "check");
1693   assert(newm->exception_table_length() == exception_table_len, "check");
1694   assert(newm->localvariable_table_length() == localvariable_len, "check");
1695   // Copy new byte codes
1696   memcpy(newm->code_base(), new_code, new_code_length);
1697   // Copy line number table
1698   if (new_compressed_linenumber_size > 0) {
1699     memcpy(newm->compressed_linenumber_table(),
1700            new_compressed_linenumber_table,
1701            new_compressed_linenumber_size);
1702   }
1703   // Copy method_parameters
1704   if (method_parameters_len > 0) {
1705     memcpy(newm->method_parameters_start(),
1706            m->method_parameters_start(),
1707            method_parameters_len * sizeof(MethodParametersElement));
1708   }
1709   // Copy checked_exceptions
1710   if (checked_exceptions_len > 0) {
1711     memcpy(newm->checked_exceptions_start(),
1712            m->checked_exceptions_start(),
1713            checked_exceptions_len * sizeof(CheckedExceptionElement));
1714   }
1715   // Copy exception table
1716   if (exception_table_len > 0) {
1717     memcpy(newm->exception_table_start(),
1718            m->exception_table_start(),
1719            exception_table_len * sizeof(ExceptionTableElement));
1720   }
  // Copy local variable table
1722   if (localvariable_len > 0) {
1723     memcpy(newm->localvariable_table_start(),
1724            m->localvariable_table_start(),
1725            localvariable_len * sizeof(LocalVariableTableElement));
1726   }
1727   // Copy stackmap table
1728   if (m->has_stackmap_table()) {
1729     int code_attribute_length = m->stackmap_data()->length();
1730     Array<u1>* stackmap_data =
1731       MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_(methodHandle()));
1732     memcpy((void*)stackmap_data->adr_at(0),
1733            (void*)m->stackmap_data()->adr_at(0), code_attribute_length);
1734     newm->set_stackmap_data(stackmap_data);
1735   }
1736 
1737   // copy annotations over to new method
1738   newcm->copy_annotations_from(loader_data, cm, CHECK_(methodHandle()));
1739   return newm;
1740 }
1741 
1742 vmSymbolID Method::klass_id_for_intrinsics(const Klass* holder) {
  // If the loader is not the default loader (i.e., non-null), we can't know the intrinsics
  // because we are not loading from core libraries.
  // Exception: the AES intrinsics come from lib/ext/sunjce_provider.jar,
  // which is not loaded by the default class loader, so we also accept the platform class loader here.
1747   const InstanceKlass* ik = InstanceKlass::cast(holder);
1748   if ((ik->class_loader() != nullptr) && !SystemDictionary::is_platform_class_loader(ik->class_loader())) {
1749     return vmSymbolID::NO_SID;   // regardless of name, no intrinsics here
1750   }
1751 
1752   // see if the klass name is well-known:
1753   Symbol* klass_name = ik->name();
1754   vmSymbolID id = vmSymbols::find_sid(klass_name);
1755   if (id != vmSymbolID::NO_SID && vmIntrinsics::class_has_intrinsics(id)) {
1756     return id;
1757   } else {
1758     return vmSymbolID::NO_SID;
1759   }
1760 }
1761 
1762 void Method::init_intrinsic_id(vmSymbolID klass_id) {
1763   assert(_intrinsic_id == static_cast<int>(vmIntrinsics::_none), "do this just once");
1764   const uintptr_t max_id_uint = right_n_bits((int)(sizeof(_intrinsic_id) * BitsPerByte));
1765   assert((uintptr_t)vmIntrinsics::ID_LIMIT <= max_id_uint, "else fix size");
1766   assert(intrinsic_id_size_in_bytes() == sizeof(_intrinsic_id), "");
1767 
1768   // the klass name is well-known:
1769   assert(klass_id == klass_id_for_intrinsics(method_holder()), "must be");
1770   assert(klass_id != vmSymbolID::NO_SID, "caller responsibility");
1771 
1772   // ditto for method and signature:
1773   vmSymbolID name_id = vmSymbols::find_sid(name());
1774   if (klass_id != VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
1775       && klass_id != VM_SYMBOL_ENUM_NAME(java_lang_invoke_VarHandle)
1776       && name_id == vmSymbolID::NO_SID) {
1777     return;
1778   }
1779   vmSymbolID sig_id = vmSymbols::find_sid(signature());
1780   if (klass_id != VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
1781       && klass_id != VM_SYMBOL_ENUM_NAME(java_lang_invoke_VarHandle)
1782       && sig_id == vmSymbolID::NO_SID) {
1783     return;
1784   }
1785   jshort flags = access_flags().as_short();
1786 
1787   vmIntrinsics::ID id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
1788   if (id != vmIntrinsics::_none) {
1789     set_intrinsic_id(id);
1790     if (id == vmIntrinsics::_Class_cast) {
1791       // Even if the intrinsic is rejected, we want to inline this simple method.
1792       set_force_inline();
1793     }
1794     return;
1795   }
1796 
1797   // A few slightly irregular cases:
1798   switch (klass_id) {
  // Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*, VarHandle.*
1800   case VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle):
1801   case VM_SYMBOL_ENUM_NAME(java_lang_invoke_VarHandle):
1802     if (!is_native())  break;
1803     id = MethodHandles::signature_polymorphic_name_id(method_holder(), name());
1804     if (is_static() != MethodHandles::is_signature_polymorphic_static(id))
1805       id = vmIntrinsics::_none;
1806     break;
1807 
1808   default:
1809     break;
1810   }
1811 
1812   if (id != vmIntrinsics::_none) {
1813     // Set up its iid.  It is an alias method.
1814     set_intrinsic_id(id);
1815     return;
1816   }
1817 }
1818 
1819 bool Method::load_signature_classes(const methodHandle& m, TRAPS) {
1820   if (!THREAD->can_call_java()) {
1821     // There is nothing useful this routine can do from within the Compile thread.
1822     // Hopefully, the signature contains only well-known classes.
1823     // We could scan for this and return true/false, but the caller won't care.
1824     return false;
1825   }
1826   bool sig_is_loaded = true;
1827   ResourceMark rm(THREAD);
1828   for (ResolvingSignatureStream ss(m()); !ss.is_done(); ss.next()) {
1829     if (ss.is_reference()) {
1830       // load everything, including arrays "[Lfoo;"
1831       Klass* klass = ss.as_klass(SignatureStream::ReturnNull, THREAD);
1832       // We are loading classes eagerly. If a ClassNotFoundException or
1833       // a LinkageError was generated, be sure to ignore it.
1834       if (HAS_PENDING_EXCEPTION) {
1835         if (PENDING_EXCEPTION->is_a(vmClasses::ClassNotFoundException_klass()) ||
1836             PENDING_EXCEPTION->is_a(vmClasses::LinkageError_klass())) {
1837           CLEAR_PENDING_EXCEPTION;
1838         } else {
1839           return false;
1840         }
1841       }
      if (klass == nullptr) { sig_is_loaded = false; }
1843     }
1844   }
1845   return sig_is_loaded;
1846 }
1847 
// Exposed so field engineers can debug the VM.
1849 void Method::print_short_name(outputStream* st) const {
1850   ResourceMark rm;
1851 #ifdef PRODUCT
1852   st->print(" %s::", method_holder()->external_name());
1853 #else
1854   st->print(" %s::", method_holder()->internal_name());
1855 #endif
1856   name()->print_symbol_on(st);
1857   if (WizardMode) signature()->print_symbol_on(st);
1858   else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))
1859     MethodHandles::print_as_basic_type_signature_on(st, signature());
1860 }
1861 
// Comparator for sorting an object array containing
// Method*s.
1864 static int method_comparator(Method* a, Method* b) {
1865   return a->name()->fast_compare(b->name());
1866 }
1867 
1868 // This is only done during class loading, so it is OK to assume method_idnum matches the methods() array
1869 // default_methods also uses this without the ordering for fast find_method
1870 void Method::sort_methods(Array<Method*>* methods, bool set_idnums, method_comparator_func func) {
1871   int length = methods->length();
1872   if (length > 1) {
1873     if (func == nullptr) {
1874       func = method_comparator;
1875     }
1876     {
1877       NoSafepointVerifier nsv;
1878       QuickSort::sort(methods->data(), length, func, /*idempotent=*/false);
1879     }
1880     // Reset method ordering
1881     if (set_idnums) {
1882       for (u2 i = 0; i < length; i++) {
1883         Method* m = methods->at(i);
1884         m->set_method_idnum(i);
1885         m->set_orig_method_idnum(i);
1886       }
1887     }
1888   }
1889 }
1890 
1891 //-----------------------------------------------------------------------------------
1892 // Non-product code unless JVM/TI needs it
1893 
1894 #if !defined(PRODUCT) || INCLUDE_JVMTI
1895 class SignatureTypePrinter : public SignatureTypeNames {
1896  private:
1897   outputStream* _st;
1898   bool _use_separator;
1899 
1900   void type_name(const char* name) {
1901     if (_use_separator) _st->print(", ");
1902     _st->print("%s", name);
1903     _use_separator = true;
1904   }
1905 
1906  public:
1907   SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
1908     _st = st;
1909     _use_separator = false;
1910   }
1911 
1912   void print_parameters()              { _use_separator = false; do_parameters_on(this); }
1913   void print_returntype()              { _use_separator = false; do_type(return_type()); }
1914 };
1915 
1916 
1917 void Method::print_name(outputStream* st) const {
1918   Thread *thread = Thread::current();
1919   ResourceMark rm(thread);
1920   st->print("%s ", is_static() ? "static" : "virtual");
1921   if (WizardMode) {
1922     st->print("%s.", method_holder()->internal_name());
1923     name()->print_symbol_on(st);
1924     signature()->print_symbol_on(st);
1925   } else {
1926     SignatureTypePrinter sig(signature(), st);
1927     sig.print_returntype();
1928     st->print(" %s.", method_holder()->internal_name());
1929     name()->print_symbol_on(st);
1930     st->print("(");
1931     sig.print_parameters();
1932     st->print(")");
1933   }
1934 }
1935 #endif // !PRODUCT || INCLUDE_JVMTI
1936 
1937 
1938 void Method::print_codes_on(outputStream* st, int flags) const {
1939   print_codes_on(0, code_size(), st, flags);
1940 }
1941 
1942 void Method::print_codes_on(int from, int to, outputStream* st, int flags) const {
1943   Thread *thread = Thread::current();
1944   ResourceMark rm(thread);
1945   methodHandle mh (thread, (Method*)this);
1946   BytecodeTracer::print_method_codes(mh, from, to, st, flags);
1947 }
1948 
1949 CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) {
1950   _bci = 0;
1951   _line = 0;
1952 };
1953 
1954 bool CompressedLineNumberReadStream::read_pair() {
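  // Pairs are delta-encoded: a zero byte terminates the stream, 0xFF escapes to two
  // signed-int deltas, and any other byte packs the bci delta in its upper five bits
  // and the line delta in its lower three bits.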
1955   jubyte next = read_byte();
1956   // Check for terminator
1957   if (next == 0) return false;
1958   if (next == 0xFF) {
1959     // Escape character, regular compression used
1960     _bci  += read_signed_int();
1961     _line += read_signed_int();
1962   } else {
1963     // Single byte compression used
1964     _bci  += next >> 3;
1965     _line += next & 0x7;
1966   }
1967   return true;
1968 }
1969 
1970 #if INCLUDE_JVMTI
1971 
1972 Bytecodes::Code Method::orig_bytecode_at(int bci) const {
1973   BreakpointInfo* bp = method_holder()->breakpoints();
1974   for (; bp != nullptr; bp = bp->next()) {
1975     if (bp->match(this, bci)) {
1976       return bp->orig_bytecode();
1977     }
1978   }
1979   {
1980     ResourceMark rm;
1981     fatal("no original bytecode found in %s at bci %d", name_and_sig_as_C_string(), bci);
1982   }
1983   return Bytecodes::_shouldnotreachhere;
1984 }
1985 
1986 void Method::set_orig_bytecode_at(int bci, Bytecodes::Code code) {
1987   assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way");
1988   BreakpointInfo* bp = method_holder()->breakpoints();
1989   for (; bp != nullptr; bp = bp->next()) {
1990     if (bp->match(this, bci)) {
1991       bp->set_orig_bytecode(code);
1992       // and continue, in case there is more than one
1993     }
1994   }
1995 }
1996 
1997 void Method::set_breakpoint(int bci) {
1998   InstanceKlass* ik = method_holder();
1999   BreakpointInfo *bp = new BreakpointInfo(this, bci);
2000   bp->set_next(ik->breakpoints());
2001   ik->set_breakpoints(bp);
2002   // do this last:
2003   bp->set(this);
2004 }
2005 
2006 static void clear_matches(Method* m, int bci) {
2007   InstanceKlass* ik = m->method_holder();
2008   BreakpointInfo* prev_bp = nullptr;
2009   BreakpointInfo* next_bp;
2010   for (BreakpointInfo* bp = ik->breakpoints(); bp != nullptr; bp = next_bp) {
2011     next_bp = bp->next();
    // A bci value of -1 is used to delete all breakpoints in method m (see clear_all_breakpoints).
2013     if (bci >= 0 ? bp->match(m, bci) : bp->match(m)) {
2014       // do this first:
2015       bp->clear(m);
2016       // unhook it
2017       if (prev_bp != nullptr)
2018         prev_bp->set_next(next_bp);
2019       else
2020         ik->set_breakpoints(next_bp);
2021       delete bp;
      // When a class is redefined, JVMTI sets a breakpoint in all versions of EMCP methods
      // at the same location. So we have multiple matching (method_index and bci)
      // BreakpointInfo nodes in the BreakpointInfo list. We should delete just one
      // breakpoint for a clear_breakpoint request and keep the BreakpointInfo of all
      // other method versions for future clear_breakpoint requests.
      // A bci value of -1 is used to clear all breakpoints (see clear_all_breakpoints),
      // which is called when the class is unloaded. In that case we delete the breakpoint
      // information for all versions of the method. We may not correctly restore the original
      // bytecode in all method versions, but that is OK because the class is being unloaded,
      // so these methods won't be used anymore.
2032       if (bci >= 0) {
2033         break;
2034       }
2035     } else {
2036       // This one is a keeper.
2037       prev_bp = bp;
2038     }
2039   }
2040 }
2041 
2042 void Method::clear_breakpoint(int bci) {
2043   assert(bci >= 0, "");
2044   clear_matches(this, bci);
2045 }
2046 
2047 void Method::clear_all_breakpoints() {
2048   clear_matches(this, -1);
2049 }
2050 
2051 #endif // INCLUDE_JVMTI
2052 
2053 int Method::invocation_count() const {
2054   MethodCounters* mcs = method_counters();
2055   MethodData* mdo = method_data();
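  // If either counter has overflowed (carry bit set), report the saturated limit;
  // otherwise return the sum of the MethodCounters and MethodData counts.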
2056   if (((mcs != nullptr) ? mcs->invocation_counter()->carry() : false) ||
2057       ((mdo != nullptr) ? mdo->invocation_counter()->carry() : false)) {
2058     return InvocationCounter::count_limit;
2059   } else {
2060     return ((mcs != nullptr) ? mcs->invocation_counter()->count() : 0) +
2061            ((mdo != nullptr) ? mdo->invocation_counter()->count() : 0);
2062   }
2063 }
2064 
2065 int Method::backedge_count() const {
2066   MethodCounters* mcs = method_counters();
2067   MethodData* mdo = method_data();
2068   if (((mcs != nullptr) ? mcs->backedge_counter()->carry() : false) ||
2069       ((mdo != nullptr) ? mdo->backedge_counter()->carry() : false)) {
2070     return InvocationCounter::count_limit;
2071   } else {
2072     return ((mcs != nullptr) ? mcs->backedge_counter()->count() : 0) +
2073            ((mdo != nullptr) ? mdo->backedge_counter()->count() : 0);
2074   }
2075 }
2076 
2077 int Method::highest_comp_level() const {
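  // Take the maximum of the level recorded in MethodCounters and the level of any
  // currently installed nmethod.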
2078   const MethodCounters* mcs = method_counters();
2079   CompiledMethod* nm = code();
2080   int level = (nm != nullptr) ? nm->comp_level() : CompLevel_none;
2081   if (mcs != nullptr) {
2082     return MAX2(mcs->highest_comp_level(), level);
2083   } else {
2084     return CompLevel_none;
2085   }
2086 }
2087 
2088 int Method::highest_osr_comp_level() const {
2089   const MethodCounters* mcs = method_counters();
2090   if (mcs != nullptr) {
2091     return mcs->highest_osr_comp_level();
2092   } else {
2093     return CompLevel_none;
2094   }
2095 }
2096 
2097 void Method::set_highest_comp_level(int level) {
2098   MethodCounters* mcs = method_counters();
2099   if (mcs != nullptr) {
2100     mcs->set_highest_comp_level(level);
2101   }
2102 }
2103 
2104 void Method::set_highest_osr_comp_level(int level) {
2105   MethodCounters* mcs = method_counters();
2106   if (mcs != nullptr) {
2107     mcs->set_highest_osr_comp_level(level);
2108   }
2109 }
2110 
2111 #if INCLUDE_JVMTI
2112 
2113 BreakpointInfo::BreakpointInfo(Method* m, int bci) {
2114   _bci = bci;
2115   _name_index = m->name_index();
2116   _signature_index = m->signature_index();
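  // Remember the bytecode originally at this bci so it can be restored when the
  // breakpoint is cleared; if a breakpoint is already set here, fetch the original
  // from the method's existing BreakpointInfo list.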
2117   _orig_bytecode = (Bytecodes::Code) *m->bcp_from(_bci);
2118   if (_orig_bytecode == Bytecodes::_breakpoint)
2119     _orig_bytecode = m->orig_bytecode_at(_bci);
2120   _next = nullptr;
2121 }
2122 
2123 void BreakpointInfo::set(Method* method) {
2124 #ifdef ASSERT
2125   {
2126     Bytecodes::Code code = (Bytecodes::Code) *method->bcp_from(_bci);
2127     if (code == Bytecodes::_breakpoint)
2128       code = method->orig_bytecode_at(_bci);
2129     assert(orig_bytecode() == code, "original bytecode must be the same");
2130   }
2131 #endif
2132   Thread *thread = Thread::current();
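  // Patch the bytecode stream; the interpreter recovers the displaced bytecode via
  // orig_bytecode_at() when it hits the breakpoint.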
2133   *method->bcp_from(_bci) = Bytecodes::_breakpoint;
2134   method->incr_number_of_breakpoints(thread);
2135   {
2136     // Deoptimize all dependents on this method
2137     HandleMark hm(thread);
2138     methodHandle mh(thread, method);
2139     CodeCache::mark_dependents_on_method_for_breakpoint(mh);
2140   }
2141 }
2142 
2143 void BreakpointInfo::clear(Method* method) {
2144   *method->bcp_from(_bci) = orig_bytecode();
2145   assert(method->number_of_breakpoints() > 0, "must not go negative");
2146   method->decr_number_of_breakpoints(Thread::current());
2147 }
2148 
2149 #endif // INCLUDE_JVMTI
2150 
2151 // jmethodID handling
2152 
// This is a block-allocating object, sort of like JNIHandleBlock, only a
// lot simpler.
2155 // It's allocated on the CHeap because once we allocate a jmethodID, we can
2156 // never get rid of it.
2157 
2158 static const int min_block_size = 8;
2159 
2160 class JNIMethodBlockNode : public CHeapObj<mtClass> {
2161   friend class JNIMethodBlock;
2162   Method**        _methods;
2163   int             _number_of_methods;
2164   int             _top;
2165   JNIMethodBlockNode* _next;
2166 
2167  public:
2168 
2169   JNIMethodBlockNode(int num_methods = min_block_size);
2170 
2171   ~JNIMethodBlockNode() { FREE_C_HEAP_ARRAY(Method*, _methods); }
2172 
2173   void ensure_methods(int num_addl_methods) {
2174     if (_top < _number_of_methods) {
2175       num_addl_methods -= _number_of_methods - _top;
2176       if (num_addl_methods <= 0) {
2177         return;
2178       }
2179     }
2180     if (_next == nullptr) {
2181       _next = new JNIMethodBlockNode(MAX2(num_addl_methods, min_block_size));
2182     } else {
2183       _next->ensure_methods(num_addl_methods);
2184     }
2185   }
2186 };
2187 
2188 class JNIMethodBlock : public CHeapObj<mtClass> {
2189   JNIMethodBlockNode _head;
2190   JNIMethodBlockNode *_last_free;
2191  public:
2192   static Method* const _free_method;
2193 
2194   JNIMethodBlock(int initial_capacity = min_block_size)
2195       : _head(initial_capacity), _last_free(&_head) {}
2196 
2197   void ensure_methods(int num_addl_methods) {
2198     _last_free->ensure_methods(num_addl_methods);
2199   }
2200 
2201   Method** add_method(Method* m) {
2202     for (JNIMethodBlockNode* b = _last_free; b != nullptr; b = b->_next) {
2203       if (b->_top < b->_number_of_methods) {
2204         // top points to the next free entry.
2205         int i = b->_top;
2206         b->_methods[i] = m;
2207         b->_top++;
2208         _last_free = b;
2209         return &(b->_methods[i]);
2210       } else if (b->_top == b->_number_of_methods) {
        // If the next free entry ran off the end of the block, see if there's a freed
        // slot earlier in the block that can be reused.
2212         for (int i = 0; i < b->_number_of_methods; i++) {
2213           if (b->_methods[i] == _free_method) {
2214             b->_methods[i] = m;
2215             _last_free = b;
2216             return &(b->_methods[i]);
2217           }
2218         }
2219         // Only check each block once for frees.  They're very unlikely.
2220         // Increment top past the end of the block.
2221         b->_top++;
2222       }
      // Need to allocate the next block.
2224       if (b->_next == nullptr) {
2225         b->_next = _last_free = new JNIMethodBlockNode();
2226       }
2227     }
2228     guarantee(false, "Should always allocate a free block");
2229     return nullptr;
2230   }
2231 
2232   bool contains(Method** m) {
2233     if (m == nullptr) return false;
2234     for (JNIMethodBlockNode* b = &_head; b != nullptr; b = b->_next) {
2235       if (b->_methods <= m && m < b->_methods + b->_number_of_methods) {
2236         // This is a bit of extra checking, for two reasons.  One is
2237         // that contains() deals with pointers that are passed in by
2238         // JNI code, so making sure that the pointer is aligned
        // correctly is valuable.  The other is that <= and < are
        // technically not defined on unrelated pointers, so the if guard can
2241         // pass spuriously; no modern compiler is likely to make that
2242         // a problem, though (and if one did, the guard could also
2243         // fail spuriously, which would be bad).
2244         ptrdiff_t idx = m - b->_methods;
2245         if (b->_methods + idx == m) {
2246           return true;
2247         }
2248       }
2249     }
2250     return false;  // not found
2251   }
2252 
2253   // Doesn't really destroy it, just marks it as free so it can be reused.
2254   void destroy_method(Method** m) {
2255 #ifdef ASSERT
2256     assert(contains(m), "should be a methodID");
2257 #endif // ASSERT
2258     *m = _free_method;
2259   }
2260 
  // During class unloading the methods are cleared, which is different
  // from being freed.
2263   void clear_all_methods() {
2264     for (JNIMethodBlockNode* b = &_head; b != nullptr; b = b->_next) {
2265       for (int i = 0; i< b->_number_of_methods; i++) {
2266         b->_methods[i] = nullptr;
2267       }
2268     }
2269   }
2270 #ifndef PRODUCT
2271   int count_methods() {
2272     // count all allocated methods
2273     int count = 0;
2274     for (JNIMethodBlockNode* b = &_head; b != nullptr; b = b->_next) {
2275       for (int i = 0; i< b->_number_of_methods; i++) {
2276         if (b->_methods[i] != _free_method) count++;
2277       }
2278     }
2279     return count;
2280   }
2281 #endif // PRODUCT
2282 };
2283 
2284 // Something that can't be mistaken for an address or a markWord
2285 Method* const JNIMethodBlock::_free_method = (Method*)55;
2286 
2287 JNIMethodBlockNode::JNIMethodBlockNode(int num_methods) : _top(0), _next(nullptr) {
2288   _number_of_methods = MAX2(num_methods, min_block_size);
2289   _methods = NEW_C_HEAP_ARRAY(Method*, _number_of_methods, mtInternal);
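  // Pre-fill every slot with the sentinel so unused slots are distinguishable from
  // live Method* entries.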
2290   for (int i = 0; i < _number_of_methods; i++) {
2291     _methods[i] = JNIMethodBlock::_free_method;
2292   }
2293 }
2294 
2295 void Method::ensure_jmethod_ids(ClassLoaderData* cld, int capacity) {
2296   // Have to add jmethod_ids() to class loader data thread-safely.
2297   // Also have to add the method to the list safely, which the lock
2298   // protects as well.
2299   MutexLocker ml(JmethodIdCreation_lock,  Mutex::_no_safepoint_check_flag);
2300   if (cld->jmethod_ids() == nullptr) {
2301     cld->set_jmethod_ids(new JNIMethodBlock(capacity));
2302   } else {
2303     cld->jmethod_ids()->ensure_methods(capacity);
2304   }
2305 }
2306 
2307 // Add a method id to the jmethod_ids
2308 jmethodID Method::make_jmethod_id(ClassLoaderData* cld, Method* m) {
2309   // Have to add jmethod_ids() to class loader data thread-safely.
2310   // Also have to add the method to the list safely, which the lock
2311   // protects as well.
2312   assert(JmethodIdCreation_lock->owned_by_self(), "sanity check");
2313   if (cld->jmethod_ids() == nullptr) {
2314     cld->set_jmethod_ids(new JNIMethodBlock());
2315   }
2316   // jmethodID is a pointer to Method*
2317   return (jmethodID)cld->jmethod_ids()->add_method(m);
2318 }
2319 
2320 jmethodID Method::jmethod_id() {
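  // jmethodIDs are allocated and cached by the holder klass; this may create a new
  // id on first use.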
2321   methodHandle mh(Thread::current(), this);
2322   return method_holder()->get_jmethod_id(mh);
2323 }
2324 
2325 // Mark a jmethodID as free.  This is called when there is a data race in
2326 // InstanceKlass while creating the jmethodID cache.
2327 void Method::destroy_jmethod_id(ClassLoaderData* cld, jmethodID m) {
2328   Method** ptr = (Method**)m;
2329   assert(cld->jmethod_ids() != nullptr, "should have method handles");
2330   cld->jmethod_ids()->destroy_method(ptr);
2331 }
2332 
2333 void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) {
2334   // Can't assert the method_holder is the same because the new method has the
2335   // scratch method holder.
2336   assert(resolve_jmethod_id(jmid)->method_holder()->class_loader()
2337            == new_method->method_holder()->class_loader() ||
2338            new_method->method_holder()->class_loader() == nullptr, // allow Unsafe substitution
2339          "changing to a different class loader");
2340   // Just change the method in place, jmethodID pointer doesn't change.
2341   *((Method**)jmid) = new_method;
2342 }
2343 
2344 bool Method::is_method_id(jmethodID mid) {
2345   Method* m = resolve_jmethod_id(mid);
2346   assert(m != nullptr, "should be called with non-null method");
2347   InstanceKlass* ik = m->method_holder();
2348   ClassLoaderData* cld = ik->class_loader_data();
2349   if (cld->jmethod_ids() == nullptr) return false;
2350   return (cld->jmethod_ids()->contains((Method**)mid));
2351 }
2352 
2353 Method* Method::checked_resolve_jmethod_id(jmethodID mid) {
2354   if (mid == nullptr) return nullptr;
2355   Method* o = resolve_jmethod_id(mid);
2356   if (o == nullptr || o == JNIMethodBlock::_free_method) {
2357     return nullptr;
2358   }
2359   // Method should otherwise be valid. Assert for testing.
2360   assert(is_valid_method(o), "should be valid jmethodid");
2361   // If the method's class holder object is unreferenced, but not yet marked as
2362   // unloaded, we need to return null here too because after a safepoint, its memory
2363   // will be reclaimed.
2364   return o->method_holder()->is_loader_alive() ? o : nullptr;
2365 };
2366 
2367 void Method::set_on_stack(const bool value) {
  // Set both the method itself and its constant pool.  The constant pool being
  // on stack means some method referring to it is also on the stack.
2370   constants()->set_on_stack(value);
2371 
2372   bool already_set = on_stack_flag();
2373   set_on_stack_flag(value);
2374   if (value && !already_set) {
2375     MetadataOnStackMark::record(this);
2376   }
2377 }
2378 
2379 void Method::record_gc_epoch() {
2380   // If any method is on the stack in continuations, none of them can be reclaimed,
2381   // so save the marking cycle to check for the whole class in the cpCache.
2382   // The cpCache is writeable.
2383   constants()->cache()->record_gc_epoch();
2384 }
2385 
2386 // Called when the class loader is unloaded to make all methods weak.
2387 void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
2388   loader_data->jmethod_ids()->clear_all_methods();
2389 }
2390 
2391 void Method::clear_jmethod_id() {
2392   // Being at a safepoint prevents racing against other class redefinitions
2393   assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
  // The jmethodID is not stored in the Method instance, so we need to look it up first
2395   jmethodID methodid = find_jmethod_id_or_null();
2396   // We need to make sure that jmethodID actually resolves to this method
2397   // - multiple redefined versions may share jmethodID slots and if a method
2398   //   has already been rewired to a newer version we could be removing reference
2399   //   to a still existing method instance
2400   if (methodid != nullptr && *((Method**)methodid) == this) {
2401     *((Method**)methodid) = nullptr;
2402   }
2403 }
2404 
2405 bool Method::has_method_vptr(const void* ptr) {
2406   Method m;
2407   // This assumes that the vtbl pointer is the first word of a C++ object.
2408   return dereference_vptr(&m) == dereference_vptr(ptr);
2409 }
2410 
2411 // Check that this pointer is valid by checking that the vtbl pointer matches
2412 bool Method::is_valid_method(const Method* m) {
2413   if (m == nullptr) {
2414     return false;
2415   } else if ((intptr_t(m) & (wordSize-1)) != 0) {
2416     // Quick sanity check on pointer.
2417     return false;
2418   } else if (!os::is_readable_range(m, m + 1)) {
2419     return false;
2420   } else if (m->is_shared()) {
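    // Archived (CDS) methods use the cloned copy of the Method vtable stored in the
    // archive; validate against that instead of the current process vtable pointer.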
2421     return CppVtables::is_valid_shared_method(m);
2422   } else if (Metaspace::contains_non_shared(m)) {
2423     return has_method_vptr((const void*)m);
2424   } else {
2425     return false;
2426   }
2427 }
2428 
2429 #ifndef PRODUCT
2430 void Method::print_jmethod_ids_count(const ClassLoaderData* loader_data, outputStream* out) {
2431   out->print("%d", loader_data->jmethod_ids()->count_methods());
2432 }
2433 #endif // PRODUCT
2434 
2435 
2436 // Printing
2437 
2438 #ifndef PRODUCT
2439 
2440 void Method::print_on(outputStream* st) const {
2441   ResourceMark rm;
2442   assert(is_method(), "must be method");
2443   st->print_cr("%s", internal_name());
2444   st->print_cr(" - this oop:          " PTR_FORMAT, p2i(this));
2445   st->print   (" - method holder:     "); method_holder()->print_value_on(st); st->cr();
2446   st->print   (" - constants:         " PTR_FORMAT " ", p2i(constants()));
2447   constants()->print_value_on(st); st->cr();
2448   st->print   (" - access:            0x%x  ", access_flags().as_int()); access_flags().print_on(st); st->cr();
2449   st->print   (" - flags:             0x%x  ", _flags.as_int()); _flags.print_on(st); st->cr();
2450   st->print   (" - name:              ");    name()->print_value_on(st); st->cr();
2451   st->print   (" - signature:         ");    signature()->print_value_on(st); st->cr();
2452   st->print_cr(" - max stack:         %d",   max_stack());
2453   st->print_cr(" - max locals:        %d",   max_locals());
2454   st->print_cr(" - size of params:    %d",   size_of_parameters());
2455   st->print_cr(" - method size:       %d",   method_size());
2456   if (intrinsic_id() != vmIntrinsics::_none)
2457     st->print_cr(" - intrinsic id:      %d %s", vmIntrinsics::as_int(intrinsic_id()), vmIntrinsics::name_at(intrinsic_id()));
2458   if (highest_comp_level() != CompLevel_none)
2459     st->print_cr(" - highest level:     %d", highest_comp_level());
2460   st->print_cr(" - vtable index:      %d",   _vtable_index);
2461   st->print_cr(" - i2i entry:         " PTR_FORMAT, p2i(interpreter_entry()));
2462   st->print(   " - adapters:          ");
2463   AdapterHandlerEntry* a = ((Method*)this)->adapter();
2464   if (a == nullptr)
2465     st->print_cr(PTR_FORMAT, p2i(a));
2466   else
2467     a->print_adapter_on(st);
2468   st->print_cr(" - compiled entry     " PTR_FORMAT, p2i(from_compiled_entry()));
2469   st->print_cr(" - code size:         %d",   code_size());
2470   if (code_size() != 0) {
2471     st->print_cr(" - code start:        " PTR_FORMAT, p2i(code_base()));
2472     st->print_cr(" - code end (excl):   " PTR_FORMAT, p2i(code_base() + code_size()));
2473   }
2474   if (method_data() != nullptr) {
2475     st->print_cr(" - method data:       " PTR_FORMAT, p2i(method_data()));
2476   }
2477   st->print_cr(" - checked ex length: %d",   checked_exceptions_length());
2478   if (checked_exceptions_length() > 0) {
2479     CheckedExceptionElement* table = checked_exceptions_start();
2480     st->print_cr(" - checked ex start:  " PTR_FORMAT, p2i(table));
2481     if (Verbose) {
2482       for (int i = 0; i < checked_exceptions_length(); i++) {
2483         st->print_cr("   - throws %s", constants()->printable_name_at(table[i].class_cp_index));
2484       }
2485     }
2486   }
2487   if (has_linenumber_table()) {
2488     u_char* table = compressed_linenumber_table();
2489     st->print_cr(" - linenumber start:  " PTR_FORMAT, p2i(table));
2490     if (Verbose) {
2491       CompressedLineNumberReadStream stream(table);
2492       while (stream.read_pair()) {
2493         st->print_cr("   - line %d: %d", stream.line(), stream.bci());
2494       }
2495     }
2496   }
2497   st->print_cr(" - localvar length:   %d",   localvariable_table_length());
2498   if (localvariable_table_length() > 0) {
2499     LocalVariableTableElement* table = localvariable_table_start();
2500     st->print_cr(" - localvar start:    " PTR_FORMAT, p2i(table));
2501     if (Verbose) {
2502       for (int i = 0; i < localvariable_table_length(); i++) {
2503         int bci = table[i].start_bci;
2504         int len = table[i].length;
2505         const char* name = constants()->printable_name_at(table[i].name_cp_index);
2506         const char* desc = constants()->printable_name_at(table[i].descriptor_cp_index);
2507         int slot = table[i].slot;
2508         st->print_cr("   - %s %s bci=%d len=%d slot=%d", desc, name, bci, len, slot);
2509       }
2510     }
2511   }
2512   if (code() != nullptr) {
2513     st->print   (" - compiled code: ");
2514     code()->print_value_on(st);
2515   }
2516   if (is_native()) {
2517     st->print_cr(" - native function:   " PTR_FORMAT, p2i(native_function()));
2518     st->print_cr(" - signature handler: " PTR_FORMAT, p2i(signature_handler()));
2519   }
2520 }
2521 
2522 void Method::print_linkage_flags(outputStream* st) {
2523   access_flags().print_on(st);
2524   if (is_default_method()) {
2525     st->print("default ");
2526   }
2527   if (is_overpass()) {
2528     st->print("overpass ");
2529   }
2530 }
2531 #endif //PRODUCT
2532 
2533 void Method::print_value_on(outputStream* st) const {
2534   assert(is_method(), "must be method");
2535   st->print("%s", internal_name());
2536   print_address_on(st);
2537   st->print(" ");
2538   name()->print_value_on(st);
2539   st->print(" ");
2540   signature()->print_value_on(st);
2541   st->print(" in ");
2542   method_holder()->print_value_on(st);
2543   if (WizardMode) st->print("#%d", _vtable_index);
2544   if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
2545   if (WizardMode && code() != nullptr) st->print(" ((nmethod*)%p)", code());
2546 }
2547 
2548 // Verification
2549 
2550 void Method::verify_on(outputStream* st) {
2551   guarantee(is_method(), "object must be method");
2552   guarantee(constants()->is_constantPool(), "should be constant pool");
2553   MethodData* md = method_data();
2554   guarantee(md == nullptr ||
2555       md->is_methodData(), "should be method data");
2556 }