< prev index next >

src/hotspot/share/oops/method.cpp

Print this page

   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "cds/cdsConfig.hpp"
  27 #include "cds/cppVtables.hpp"
  28 #include "cds/metaspaceShared.hpp"

  29 #include "classfile/classLoaderDataGraph.hpp"
  30 #include "classfile/metadataOnStackMark.hpp"
  31 #include "classfile/symbolTable.hpp"
  32 #include "classfile/systemDictionary.hpp"
  33 #include "classfile/vmClasses.hpp"
  34 #include "code/codeCache.hpp"
  35 #include "code/debugInfoRec.hpp"
  36 #include "compiler/compilationPolicy.hpp"
  37 #include "gc/shared/collectedHeap.inline.hpp"
  38 #include "interpreter/bytecodeStream.hpp"
  39 #include "interpreter/bytecodeTracer.hpp"
  40 #include "interpreter/bytecodes.hpp"
  41 #include "interpreter/interpreter.hpp"
  42 #include "interpreter/oopMapCache.hpp"
  43 #include "logging/log.hpp"
  44 #include "logging/logStream.hpp"
  45 #include "logging/logTag.hpp"
  46 #include "memory/allocation.inline.hpp"
  47 #include "memory/metadataFactory.hpp"
  48 #include "memory/metaspaceClosure.hpp"
  49 #include "memory/oopFactory.hpp"
  50 #include "memory/resourceArea.hpp"
  51 #include "memory/universe.hpp"
  52 #include "nmt/memTracker.hpp"
  53 #include "oops/constMethod.hpp"
  54 #include "oops/constantPool.hpp"
  55 #include "oops/klass.inline.hpp"
  56 #include "oops/method.inline.hpp"
  57 #include "oops/methodData.hpp"
  58 #include "oops/objArrayKlass.hpp"
  59 #include "oops/objArrayOop.inline.hpp"
  60 #include "oops/oop.inline.hpp"
  61 #include "oops/symbol.hpp"

  62 #include "prims/jvmtiExport.hpp"
  63 #include "prims/methodHandles.hpp"
  64 #include "runtime/atomic.hpp"
  65 #include "runtime/continuationEntry.hpp"
  66 #include "runtime/frame.inline.hpp"
  67 #include "runtime/handles.inline.hpp"
  68 #include "runtime/init.hpp"
  69 #include "runtime/java.hpp"
  70 #include "runtime/orderAccess.hpp"

  71 #include "runtime/relocator.hpp"
  72 #include "runtime/safepointVerifiers.hpp"
  73 #include "runtime/sharedRuntime.hpp"
  74 #include "runtime/signature.hpp"
  75 #include "runtime/threads.hpp"
  76 #include "runtime/vm_version.hpp"
  77 #include "utilities/align.hpp"
  78 #include "utilities/quickSort.hpp"
  79 #include "utilities/vmError.hpp"
  80 #include "utilities/xmlstream.hpp"
  81 
  82 // Implementation of Method
  83 
  84 Method* Method::allocate(ClassLoaderData* loader_data,
  85                          int byte_code_size,
  86                          AccessFlags access_flags,
  87                          InlineTableSizes* sizes,
  88                          ConstMethod::MethodType method_type,
  89                          Symbol* name,
  90                          TRAPS) {

 161 }
 162 
// Returns the compiled-to-interpreter adapter entry that skips the class
// initialization barrier. Only meaningful on platforms with fast class-init
// checks, and only after the adapter has been created.
address Method::get_c2i_no_clinit_check_entry() {
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}
 168 
// Resource-allocated "<holder>.<name><signature>" string for this method.
char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}
 172 
// Same as above, but writes into the caller-supplied buffer of the given size.
char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}
 176 
 177 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
 178   const char* klass_name = klass->external_name();
 179   int klass_name_len  = (int)strlen(klass_name);
 180   int method_name_len = method_name->utf8_length();
 181   int len             = klass_name_len + 1 + method_name_len + signature->utf8_length();
 182   char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
 183   strcpy(dest, klass_name);
 184   dest[klass_name_len] = '.';
 185   strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
 186   strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());

 187   dest[len] = 0;
 188   return dest;
 189 }
 190 
 191 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
 192   Symbol* klass_name = klass->name();
 193   klass_name->as_klass_external_name(buf, size);
 194   int len = (int)strlen(buf);
 195 
 196   if (len < size - 1) {
 197     buf[len++] = '.';
 198 
 199     method_name->as_C_string(&(buf[len]), size - len);
 200     len = (int)strlen(buf);
 201 
 202     signature->as_C_string(&(buf[len]), size - len);
 203   }
 204 
 205   return buf;
 206 }

 368 address Method::bcp_from(address bcp) const {
 369   if (is_native() && bcp == nullptr) {
 370     return code_base();
 371   } else {
 372     return bcp;
 373   }
 374 }
 375 
 376 int Method::size(bool is_native) {
 377   // If native, then include pointers for native_function and signature_handler
 378   int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
 379   int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
 380   return align_metadata_size(header_size() + extra_words);
 381 }
 382 
// Name Symbol of this method's holder class.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
 386 
 387 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
 388   log_trace(cds)("Iter(Method): %p", this);
 389 
 390   if (!method_holder()->is_rewritten()) {





 391     it->push(&_constMethod, MetaspaceClosure::_writable);
 392   } else {
 393     it->push(&_constMethod);
 394   }
 395   it->push(&_method_data);
 396   it->push(&_method_counters);
 397   NOT_PRODUCT(it->push(&_name);)
 398 }
 399 
 400 #if INCLUDE_CDS
 401 // Attempt to return method to original state.  Clear any pointers
 402 // (to objects outside the shared spaces).  We won't be able to predict
 403 // where they should point in a new JVM.  Further initialize some
 404 // entries now in order allow them to be write protected later.
 405 
// Strips process-specific state so this Method can be written to the archive.
void Method::remove_unshareable_info() {
  // Drops all entry points and runtime-only pointers.
  unlink_method();
  // JFR method ids are not valid across archive dumps.
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}
 410 
// Sanity checks when a Method is loaded back from the CDS archive: the C++
// vtable must have been patched and no stale compilation state archived.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
 415 #endif
 416 
 417 void Method::set_vtable_index(int index) {
 418   if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 419     // At runtime initialize_vtable is rerun as part of link_class_impl()
 420     // for a shared class loaded by the non-boot loader to obtain the loader
 421     // constraints based on the runtime classloaders' context.
 422     return; // don't write into the shared class
 423   } else {
 424     _vtable_index = index;
 425   }
 426 }
 427 
 428 void Method::set_itable_index(int index) {
 429   if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 430     // At runtime initialize_itable is rerun as part of link_class_impl()
 431     // for a shared class loaded by the non-boot loader to obtain the loader
 432     // constraints based on the runtime classloaders' context. The dumptime
 433     // itable index should be the same as the runtime index.

 560   // Counting based on signed int counters tends to overflow with
 561   // longer-running workloads on fast machines. The counters under
 562   // consideration here, however, are limited in range by counting
 563   // logic. See InvocationCounter:count_limit for example.
 564   // No "overflow precautions" need to be implemented here.
 565   st->print_cr ("  interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count());
 566   st->print_cr ("  invocation_counter:           " INT32_FORMAT_W(11), invocation_count());
 567   st->print_cr ("  backedge_counter:             " INT32_FORMAT_W(11), backedge_count());
 568 
 569   if (method_data() != nullptr) {
 570     st->print_cr ("  decompile_count:              " UINT32_FORMAT_W(11), method_data()->decompile_count());
 571   }
 572 
 573 #ifndef PRODUCT
 574   if (CountCompiledCalls) {
 575     st->print_cr ("  compiled_invocation_count:    " INT64_FORMAT_W(11), compiled_invocation_count());
 576   }
 577 #endif
 578 }
 579 




























// Build a MethodData* object to hold profiling information collected on this
// method when requested.
void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
  // Do not profile the method if metaspace has hit an OOM previously
  // allocating profiling data. Callers clear pending exception so don't
  // add one here.
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return;
  }

  ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
  MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    // Allocation failed: record the failure globally so future profiling
    // attempts are suppressed; the caller observes/clears the exception.
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return;   // return the exception (which is cleared)
  }

  // Install atomically; if another thread won the race, free our copy and
  // keep the winner's MDO.
  if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
    MetadataFactory::free_metadata(loader_data, method_data);
    return;
  }

  if (PrintMethodData && (Verbose || WizardMode)) {
    ResourceMark rm(THREAD);
    tty->print("build_profiling_method_data for ");
    method->print_name(tty);
    tty->cr();
    // At the end of the run, the MDO, full of data, will be dumped.
  }
}
 611 
// Lazily allocates and installs the MethodCounters for m. Returns the
// installed counters (possibly created by a racing thread), or null if
// metaspace is exhausted. Never leaves a pending exception.
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      // Failure is reported via the null check below, not via the exception.
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    // Remember the OOM globally so later callers skip profiling entirely.
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  if (!mh->init_method_counters(counters)) {
    // Another thread installed counters first; discard ours and use theirs.
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  return mh->method_counters();
}
 646 
// Try to install a pointer to MethodCounters, return true on success.
// Returns false if another thread already installed a non-null pointer.
bool Method::init_method_counters(MethodCounters* counters) {
  return Atomic::replace_if_null(&_method_counters, counters);
}
 651 
 652 void Method::set_exception_handler_entered(int handler_bci) {
 653   if (ProfileExceptionHandlers) {
 654     MethodData* mdo = method_data();
 655     if (mdo != nullptr) {
 656       BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
 657       handler_data.set_exception_handler_entered();
 658     }
 659   }
 660 }
 661 
 662 int Method::extra_stack_words() {
 663   // not an inline function, to avoid a header dependency on Interpreter

 897   return (is_static() ||
 898           method_holder()->major_version() < 51);
 899 }
 900 
bool Method::is_static_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static.  Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return name() == vmSymbols::class_initializer_name() &&
         has_valid_initializer_flags();
}
 908 
// True if this method is named "<init>" (a constructor).
bool Method::is_object_initializer() const {
   return name() == vmSymbols::object_initializer_name();
}
 912 
// Static methods of a not-yet-initialized holder need a class-init barrier
// before they can be invoked from compiled code.
bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}
 916 





 917 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
 918   int length = method->checked_exceptions_length();
 919   if (length == 0) {  // common case
 920     return objArrayHandle(THREAD, Universe::the_empty_class_array());
 921   } else {
 922     methodHandle h_this(THREAD, method);
 923     objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
 924     objArrayHandle mirrors (THREAD, m_oop);
 925     for (int i = 0; i < length; i++) {
 926       CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
 927       Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
 928       if (log_is_enabled(Warning, exceptions) &&
 929           !k->is_subclass_of(vmClasses::Throwable_klass())) {
 930         ResourceMark rm(THREAD);
 931         log_warning(exceptions)(
 932           "Class %s in throws clause of method %s is not a subtype of class java.lang.Throwable",
 933           k->external_name(), method->external_name());
 934       }
 935       mirrors->obj_at_put(i, k->java_mirror());
 936     }

1176 }
1177 
1178 #if INCLUDE_CDS
// Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  // Entry points are process-specific; they are regenerated when the
  // archived method is linked again at runtime.
  _code = nullptr;
  _adapter = nullptr;
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_interpreted_entry = nullptr;

  if (is_native()) {
    // Native function and signature handler point into this process.
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  // Profiling state is not archived either.
  clear_method_data();
  clear_method_counters();
  remove_unshareable_flags();
}
1198 
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  // These flags describe transient runtime state (compilation decisions,
  // stack presence) and must be reset for the archive.
  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
}
1212 #endif
1213 
1214 // Called when the method_holder is getting linked. Setup entrypoints so the method
1215 // is ready to be called from interpreter, compiler, and vtables.
1216 void Method::link_method(const methodHandle& h_method, TRAPS) {




1217   // If the code cache is full, we may reenter this function for the
1218   // leftover methods that weren't linked.
1219   if (adapter() != nullptr) {
1220     return;
1221   }
1222   assert( _code == nullptr, "nothing compiled yet" );
1223 
1224   // Setup interpreter entrypoint
1225   assert(this == h_method(), "wrong h_method()" );
1226 
1227   assert(adapter() == nullptr, "init'd to null");
1228   address entry = Interpreter::entry_for_method(h_method);
1229   assert(entry != nullptr, "interpreter entry must be non-null");
1230   // Sets both _i2i_entry and _from_interpreted_entry
1231   set_interpreter_entry(entry);
1232 
1233   // Don't overwrite already registered native entries.
1234   if (is_native() && !has_native_function()) {
1235     set_native_function(
1236       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),

1245   // called from the vtable.  We need adapters on such methods that get loaded
1246   // later.  Ditto for mega-morphic itable calls.  If this proves to be a
1247   // problem we'll make these lazily later.
1248   (void) make_adapters(h_method, CHECK);
1249 
1250   // ONLY USE the h_method now as make_adapter may have blocked
1251 
1252   if (h_method->is_continuation_native_intrinsic()) {
1253     _from_interpreted_entry = nullptr;
1254     _from_compiled_entry = nullptr;
1255     _i2i_entry = nullptr;
1256     if (Continuations::enabled()) {
1257       assert(!Threads::is_vm_complete(), "should only be called during vm init");
1258       AdapterHandlerLibrary::create_native_wrapper(h_method);
1259       if (!h_method->has_compiled_code()) {
1260         THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1261       }
1262       assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1263     }
1264   }





1265 }
1266 
// Creates (or fetches the cached) i2c/c2i adapters for mh and installs
// them. Returns the c2i entry; exits the VM or throws on code-cache
// exhaustion.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  // Adapters for compiled code are made eagerly here.  They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  // Until compiled code exists, compiled callers enter via the c2i adapter.
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  return adapter->get_c2i_entry();
}
1287 

1451   assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");
1452 
1453   log_info(methodhandles)("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());
1454 
1455   // invariant:   cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
1456   name->increment_refcount();
1457   signature->increment_refcount();
1458 
1459   int cp_length = _imcp_limit;
1460   ClassLoaderData* loader_data = holder->class_loader_data();
1461   constantPoolHandle cp;
1462   {
1463     ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
1464     cp = constantPoolHandle(THREAD, cp_oop);
1465   }
1466   cp->copy_fields(holder->constants());
1467   cp->set_pool_holder(holder);
1468   cp->symbol_at_put(_imcp_invoke_name,       name);
1469   cp->symbol_at_put(_imcp_invoke_signature,  signature);
1470   cp->set_has_preresolution();

1471 
1472   // decide on access bits:  public or not?
1473   int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
1474   bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
1475   if (must_be_static)  flags_bits |= JVM_ACC_STATIC;
1476   assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");
1477 
1478   methodHandle m;
1479   {
1480     InlineTableSizes sizes;
1481     Method* m_oop = Method::allocate(loader_data, 0,
1482                                      accessFlags_from(flags_bits), &sizes,
1483                                      ConstMethod::NORMAL,
1484                                      name,
1485                                      CHECK_(empty));
1486     m = methodHandle(THREAD, m_oop);
1487   }
1488   m->set_constants(cp());
1489   m->set_name_index(_imcp_invoke_name);
1490   m->set_signature_index(_imcp_invoke_signature);

1499   assert(m->intrinsic_id() == iid, "correctly predicted iid");
1500 #endif //ASSERT
1501 
1502   // Finally, set up its entry points.
1503   assert(m->can_be_statically_bound(), "");
1504   m->set_vtable_index(Method::nonvirtual_vtable_index);
1505   m->link_method(m, CHECK_(empty));
1506 
1507   if (iid == vmIntrinsics::_linkToNative) {
1508     m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1509   }
1510   if (log_is_enabled(Debug, methodhandles)) {
1511     LogTarget(Debug, methodhandles) lt;
1512     LogStream ls(lt);
1513     m->print_on(&ls);
1514   }
1515 
1516   return m;
1517 }
1518 










1519 Klass* Method::check_non_bcp_klass(Klass* klass) {
1520   if (klass != nullptr && klass->class_loader() != nullptr) {
1521     if (klass->is_objArray_klass())
1522       klass = ObjArrayKlass::cast(klass)->bottom_klass();
1523     return klass;
1524   }
1525   return nullptr;
1526 }
1527 
1528 
1529 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
1530                                                 u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
1531   // Code below does not work for native methods - they should never get rewritten anyway
1532   assert(!m->is_native(), "cannot rewrite native methods");
1533   // Allocate new Method*
1534   AccessFlags flags = m->access_flags();
1535 
1536   ConstMethod* cm = m->constMethod();
1537   int checked_exceptions_len = cm->checked_exceptions_length();
1538   int localvariable_len = cm->localvariable_table_length();

1954   } else {
1955     return ((mcs != nullptr) ? mcs->invocation_counter()->count() : 0) +
1956            ((mdo != nullptr) ? mdo->invocation_counter()->count() : 0);
1957   }
1958 }
1959 
1960 int Method::backedge_count() const {
1961   MethodCounters* mcs = method_counters();
1962   MethodData* mdo = method_data();
1963   if (((mcs != nullptr) ? mcs->backedge_counter()->carry() : false) ||
1964       ((mdo != nullptr) ? mdo->backedge_counter()->carry() : false)) {
1965     return InvocationCounter::count_limit;
1966   } else {
1967     return ((mcs != nullptr) ? mcs->backedge_counter()->count() : 0) +
1968            ((mdo != nullptr) ? mdo->backedge_counter()->count() : 0);
1969   }
1970 }
1971 
1972 int Method::highest_comp_level() const {
1973   const MethodCounters* mcs = method_counters();


1974   if (mcs != nullptr) {
1975     return mcs->highest_comp_level();
1976   } else {
1977     return CompLevel_none;
1978   }
1979 }
1980 
1981 int Method::highest_osr_comp_level() const {
1982   const MethodCounters* mcs = method_counters();
1983   if (mcs != nullptr) {
1984     return mcs->highest_osr_comp_level();
1985   } else {
1986     return CompLevel_none;
1987   }
1988 }
1989 
1990 void Method::set_highest_comp_level(int level) {
1991   MethodCounters* mcs = method_counters();
1992   if (mcs != nullptr) {
1993     mcs->set_highest_comp_level(level);
1994   }
1995 }

   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "cds/cdsConfig.hpp"
  27 #include "cds/cppVtables.hpp"
  28 #include "cds/metaspaceShared.hpp"
  29 #include "classfile/classLoader.hpp"
  30 #include "classfile/classLoaderDataGraph.hpp"
  31 #include "classfile/metadataOnStackMark.hpp"
  32 #include "classfile/symbolTable.hpp"
  33 #include "classfile/systemDictionary.hpp"
  34 #include "classfile/vmClasses.hpp"
  35 #include "code/codeCache.hpp"
  36 #include "code/debugInfoRec.hpp"
  37 #include "compiler/compilationPolicy.hpp"
  38 #include "gc/shared/collectedHeap.inline.hpp"
  39 #include "interpreter/bytecodeStream.hpp"
  40 #include "interpreter/bytecodeTracer.hpp"
  41 #include "interpreter/bytecodes.hpp"
  42 #include "interpreter/interpreter.hpp"
  43 #include "interpreter/oopMapCache.hpp"
  44 #include "logging/log.hpp"
  45 #include "logging/logStream.hpp"
  46 #include "logging/logTag.hpp"
  47 #include "memory/allocation.inline.hpp"
  48 #include "memory/metadataFactory.hpp"
  49 #include "memory/metaspaceClosure.hpp"
  50 #include "memory/oopFactory.hpp"
  51 #include "memory/resourceArea.hpp"
  52 #include "memory/universe.hpp"
  53 #include "nmt/memTracker.hpp"
  54 #include "oops/constMethod.hpp"
  55 #include "oops/constantPool.hpp"
  56 #include "oops/klass.inline.hpp"
  57 #include "oops/method.inline.hpp"
  58 #include "oops/methodData.hpp"
  59 #include "oops/objArrayKlass.hpp"
  60 #include "oops/objArrayOop.inline.hpp"
  61 #include "oops/oop.inline.hpp"
  62 #include "oops/symbol.hpp"
  63 #include "oops/trainingData.hpp"
  64 #include "prims/jvmtiExport.hpp"
  65 #include "prims/methodHandles.hpp"
  66 #include "runtime/atomic.hpp"
  67 #include "runtime/continuationEntry.hpp"
  68 #include "runtime/frame.inline.hpp"
  69 #include "runtime/handles.inline.hpp"
  70 #include "runtime/init.hpp"
  71 #include "runtime/java.hpp"
  72 #include "runtime/orderAccess.hpp"
  73 #include "runtime/perfData.hpp"
  74 #include "runtime/relocator.hpp"
  75 #include "runtime/safepointVerifiers.hpp"
  76 #include "runtime/sharedRuntime.hpp"
  77 #include "runtime/signature.hpp"
  78 #include "runtime/threads.hpp"
  79 #include "runtime/vm_version.hpp"
  80 #include "utilities/align.hpp"
  81 #include "utilities/quickSort.hpp"
  82 #include "utilities/vmError.hpp"
  83 #include "utilities/xmlstream.hpp"
  84 
  85 // Implementation of Method
  86 
  87 Method* Method::allocate(ClassLoaderData* loader_data,
  88                          int byte_code_size,
  89                          AccessFlags access_flags,
  90                          InlineTableSizes* sizes,
  91                          ConstMethod::MethodType method_type,
  92                          Symbol* name,
  93                          TRAPS) {

 164 }
 165 
// Returns the compiled-to-interpreter adapter entry that skips the class
// initialization barrier. Only meaningful on platforms with fast class-init
// checks, and only after the adapter has been created.
address Method::get_c2i_no_clinit_check_entry() {
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}
 171 
// Resource-allocated "<holder>::<name><signature>" string for this method.
char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}
 175 
// Same as above, but writes into the caller-supplied buffer of the given size.
char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}
 179 
 180 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
 181   const char* klass_name = klass->external_name();
 182   int klass_name_len  = (int)strlen(klass_name);
 183   int method_name_len = method_name->utf8_length();
 184   int len             = klass_name_len + 2 + method_name_len + signature->utf8_length();
 185   char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
 186   strcpy(dest, klass_name);
 187   dest[klass_name_len + 0] = ':';
 188   dest[klass_name_len + 1] = ':';
 189   strcpy(&dest[klass_name_len + 2], method_name->as_C_string());
 190   strcpy(&dest[klass_name_len + 2 + method_name_len], signature->as_C_string());
 191   dest[len] = 0;
 192   return dest;
 193 }
 194 
// Fixed-buffer variant: writes "<klass>.<name><signature>" into buf,
// truncating (rather than overflowing) when the buffer is too small.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  // Only append the separator and the rest if there is room left.
  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}

// Normalizes a bcp: native methods have no bytecodes, so a null bcp is
// mapped to the code base; any other value is returned unchanged.
address Method::bcp_from(address bcp) const {
  if (is_native() && bcp == nullptr) {
    return code_base();
  } else {
    return bcp;
  }
}
 379 
// Size of a Method in metadata words.
int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}
 386 
// Name Symbol of this method's holder class.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
 390 
// Visits all metaspace pointers embedded in this Method, for CDS archiving.
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  // Optional trace logging of each visited Method, including its name.
  LogStreamHandle(Trace, cds) lsh;
  if (lsh.is_enabled()) {
    lsh.print("Iter(Method): %p ", this);
    print_external_name(&lsh);
    lsh.cr();
  }
  if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
    // holder is null for MH intrinsic methods
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
 408 
 409 #if INCLUDE_CDS
 410 // Attempt to return method to original state.  Clear any pointers
 411 // (to objects outside the shared spaces).  We won't be able to predict
 412 // where they should point in a new JVM.  Further initialize some
 413 // entries now in order allow them to be write protected later.
 414 
// Strips process-specific state so this Method can be written to the archive.
void Method::remove_unshareable_info() {
  // Drops all entry points and runtime-only pointers.
  unlink_method();
  // MethodData and MethodCounters carry runtime-only state of their own.
  if (method_data() != nullptr) {
    method_data()->remove_unshareable_info();
  }
  if (method_counters() != nullptr) {
    method_counters()->remove_unshareable_info();
  }
  // JFR method ids are not valid across archive dumps.
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}
 425 
// Re-initializes the pieces cleared by remove_unshareable_info() when a
// Method is loaded back from the CDS archive. May throw (CHECK).
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  // No stale compilation state should have been archived.
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
  assert(!pending_queue_processed(), "method's pending_queued_processed flag should not be set");
}
 437 #endif
 438 
// Sets the vtable index, except for read-only archived methods whose
// holder was verified at dump time (the archived value is already correct).
void Method::set_vtable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}
 449 
 450 void Method::set_itable_index(int index) {
 451   if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 452     // At runtime initialize_itable is rerun as part of link_class_impl()
 453     // for a shared class loaded by the non-boot loader to obtain the loader
 454     // constraints based on the runtime classloaders' context. The dumptime
 455     // itable index should be the same as the runtime index.

 582   // Counting based on signed int counters tends to overflow with
 583   // longer-running workloads on fast machines. The counters under
 584   // consideration here, however, are limited in range by counting
 585   // logic. See InvocationCounter:count_limit for example.
 586   // No "overflow precautions" need to be implemented here.
 587   st->print_cr ("  interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count());
 588   st->print_cr ("  invocation_counter:           " INT32_FORMAT_W(11), invocation_count());
 589   st->print_cr ("  backedge_counter:             " INT32_FORMAT_W(11), backedge_count());
 590 
 591   if (method_data() != nullptr) {
 592     st->print_cr ("  decompile_count:              " UINT32_FORMAT_W(11), method_data()->decompile_count());
 593   }
 594 
 595 #ifndef PRODUCT
 596   if (CountCompiledCalls) {
 597     st->print_cr ("  compiled_invocation_count:    " INT64_FORMAT_W(11), compiled_invocation_count());
 598   }
 599 #endif
 600 }
 601 
 602 MethodTrainingData* Method::training_data_or_null() const {
 603   MethodCounters* mcs = method_counters();
 604   if (mcs == nullptr) {
 605     return nullptr;
 606   } else {
 607     return mcs->method_training_data();
 608   }
 609 }
 610 
 611 bool Method::init_training_data(MethodTrainingData* tdata) {
 612   MethodCounters* mcs = method_counters();
 613   if (mcs == nullptr) {
 614     return false;
 615   } else {
 616     return mcs->init_method_training_data(tdata);
 617   }
 618 }
 619 
// Try to install a pre-recorded training profile ("final profile") as this
// method's MethodData.  Succeeds only when a MethodTrainingData exists whose
// holder and profile both refer to this exact method.  Note that the result
// of replace_if_null is ignored: this returns true even if another thread
// already published a MethodData first (see FIXME below).
bool Method::install_training_method_data(const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::find(method);
  if (mtd != nullptr && mtd->has_holder() && mtd->final_profile() != nullptr &&
      mtd->holder() == method() && mtd->final_profile()->method() == method()) { // FIXME
    Atomic::replace_if_null(&method->_method_data, mtd->final_profile());
    return true;
  }
  return false;
}
 629 
// Build a MethodData* object to hold profiling information collected on this
// method when requested.  A pre-recorded training profile takes precedence;
// otherwise a fresh MethodData is allocated and published atomically (the
// loser of a racing install frees its copy).
void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
  if (install_training_method_data(method)) {
    return;
  }
  // Do not profile the method if metaspace has hit an OOM previously
  // allocating profiling data. Callers clear pending exception so don't
  // add one here.
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return;
  }

  ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
  MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    // Record the metaspace OOM so subsequent callers bail out early (see
    // the has_metaspace_oom() check above).
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return;   // return the exception (which is cleared)
  }

  // Publish atomically; if another thread installed a MethodData first,
  // discard ours and keep the winner's.
  if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
    MetadataFactory::free_metadata(loader_data, method_data);
    return;
  }

  /*
  LogStreamHandle(Info, mdo) lsh;
  if (lsh.is_enabled()) {
    ResourceMark rm(THREAD);
    lsh.print("build_profiling_method_data for ");
    method->print_name(&lsh);
    lsh.cr();
  }
  */
  // When training-data collection is forced, make sure a MethodTrainingData
  // record exists for this method.
  if (ForceProfiling && TrainingData::need_data()) {
    MethodTrainingData* mtd = MethodTrainingData::make(method, false);
    guarantee(mtd != nullptr, "");
  }
  if (PrintMethodData) {
    ResourceMark rm(THREAD);
    tty->print("build_profiling_method_data for ");
    method->print_name(tty);
    tty->cr();
    // At the end of the run, the MDO, full of data, will be dumped.
  }
}
 677 
// Allocate and install a MethodCounters object for m, or return the one a
// racing thread installed first.  Returns nullptr when metaspace is
// exhausted (and records that fact so later callers skip profiling).
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      // Allocation failure is handled via the nullptr check below; the
      // exception itself is deliberately discarded.
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  // Install atomically; on losing the race, free our copy and fall through
  // to return the winner's counters.
  if (!mh->init_method_counters(counters)) {
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  // When training-data collection is forced, ensure a MethodTrainingData
  // record exists for this method.
  if (ForceProfiling && TrainingData::need_data()) {
    MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
    guarantee(mtd != nullptr, "");
  }

  // Re-read through the handle: either our counters or the racing thread's.
  return mh->method_counters();
}
 717 
 718 bool Method::init_method_counters(MethodCounters* counters) {
 719   // Try to install a pointer to MethodCounters, return true on success.
 720   return Atomic::replace_if_null(&_method_counters, counters);
 721 }
 722 
 723 void Method::set_exception_handler_entered(int handler_bci) {
 724   if (ProfileExceptionHandlers) {
 725     MethodData* mdo = method_data();
 726     if (mdo != nullptr) {
 727       BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
 728       handler_data.set_exception_handler_entered();
 729     }
 730   }
 731 }
 732 
 733 int Method::extra_stack_words() {
 734   // not an inline function, to avoid a header dependency on Interpreter

 968   return (is_static() ||
 969           method_holder()->major_version() < 51);
 970 }
 971 
 972 bool Method::is_static_initializer() const {
 973   // For classfiles version 51 or greater, ensure that the clinit method is
 974   // static.  Non-static methods with the name "<clinit>" are not static
 975   // initializers. (older classfiles exempted for backward compatibility)
 976   return name() == vmSymbols::class_initializer_name() &&
 977          has_valid_initializer_flags();
 978 }
 979 
 980 bool Method::is_object_initializer() const {
 981    return name() == vmSymbols::object_initializer_name();
 982 }
 983 
 984 bool Method::needs_clinit_barrier() const {
 985   return is_static() && !method_holder()->is_initialized();
 986 }
 987 
 988 bool Method::code_has_clinit_barriers() const {
 989   CompiledMethod* nm = code();
 990   return (nm != nullptr) && nm->has_clinit_barriers();
 991 }
 992 
 993 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
 994   int length = method->checked_exceptions_length();
 995   if (length == 0) {  // common case
 996     return objArrayHandle(THREAD, Universe::the_empty_class_array());
 997   } else {
 998     methodHandle h_this(THREAD, method);
 999     objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
1000     objArrayHandle mirrors (THREAD, m_oop);
1001     for (int i = 0; i < length; i++) {
1002       CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
1003       Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
1004       if (log_is_enabled(Warning, exceptions) &&
1005           !k->is_subclass_of(vmClasses::Throwable_klass())) {
1006         ResourceMark rm(THREAD);
1007         log_warning(exceptions)(
1008           "Class %s in throws clause of method %s is not a subtype of class java.lang.Throwable",
1009           k->external_name(), method->external_name());
1010       }
1011       mirrors->obj_at_put(i, k->java_mirror());
1012     }

1252 }
1253 
1254 #if INCLUDE_CDS
// Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  // Entry points and compiled code are process-specific; null them all.
  _code = nullptr;
  _adapter = nullptr;
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_interpreted_entry = nullptr;

  if (is_native()) {
    // The native function pointer and signature handler point outside the
    // archive; clear them as well.
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  // Reset all profiling and compilation state so the archived method
  // starts fresh in a new JVM.
  clear_method_data();
  clear_method_counters();
  clear_is_not_c1_compilable();
  clear_is_not_c1_osr_compilable();
  clear_is_not_c2_compilable();
  clear_is_not_c2_osr_compilable();
  clear_queued_for_compilation();
  set_pending_queue_processed(false);
  remove_unshareable_flags();
}
1280 
// Reset flag bits that must not appear in the archived copy of this method.
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_pending_queue_processed(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  // NOTE(review): unlike unlink_method(), no c1_osr flag is reset here --
  // presumably c1 OSR compilability aliases the c1 flag; confirm in method.hpp.
  set_on_stack_flag(false);
}
1295 #endif
1296 
1297 // Called when the method_holder is getting linked. Setup entrypoints so the method
1298 // is ready to be called from interpreter, compiler, and vtables.
1299 void Method::link_method(const methodHandle& h_method, TRAPS) {
1300   if (UsePerfData) {
1301     ClassLoader::perf_ik_link_methods_count()->inc();
1302   }
1303 
1304   // If the code cache is full, we may reenter this function for the
1305   // leftover methods that weren't linked.
1306   if (adapter() != nullptr) {
1307     return;
1308   }
1309   assert( _code == nullptr, "nothing compiled yet" );
1310 
1311   // Setup interpreter entrypoint
1312   assert(this == h_method(), "wrong h_method()" );
1313 
1314   assert(adapter() == nullptr, "init'd to null");
1315   address entry = Interpreter::entry_for_method(h_method);
1316   assert(entry != nullptr, "interpreter entry must be non-null");
1317   // Sets both _i2i_entry and _from_interpreted_entry
1318   set_interpreter_entry(entry);
1319 
1320   // Don't overwrite already registered native entries.
1321   if (is_native() && !has_native_function()) {
1322     set_native_function(
1323       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),

1332   // called from the vtable.  We need adapters on such methods that get loaded
1333   // later.  Ditto for mega-morphic itable calls.  If this proves to be a
1334   // problem we'll make these lazily later.
1335   (void) make_adapters(h_method, CHECK);
1336 
1337   // ONLY USE the h_method now as make_adapter may have blocked
1338 
1339   if (h_method->is_continuation_native_intrinsic()) {
1340     _from_interpreted_entry = nullptr;
1341     _from_compiled_entry = nullptr;
1342     _i2i_entry = nullptr;
1343     if (Continuations::enabled()) {
1344       assert(!Threads::is_vm_complete(), "should only be called during vm init");
1345       AdapterHandlerLibrary::create_native_wrapper(h_method);
1346       if (!h_method->has_compiled_code()) {
1347         THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1348       }
1349       assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1350     }
1351   }
1352   if (_preload_code != nullptr) {
1353     MutexLocker ml(CompiledMethod_lock, Mutex::_no_safepoint_check_flag);
1354     set_code(h_method, _preload_code);
1355     assert(((nmethod*)_preload_code)->scc_entry() == _scc_entry, "sanity");
1356   }
1357 }
1358 
// Create (or fetch from the shared cache) the i2c/c2i adapters for mh and
// record the c2i entry as the method's _from_compiled_entry.  Returns the
// c2i entry; on code-cache exhaustion, throws VirtualMachineError (or exits
// the VM when initialization is not yet complete).
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here.  They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  return adapter->get_c2i_entry();
}
1381 

1545   assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");
1546 
1547   log_info(methodhandles)("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());
1548 
1549   // invariant:   cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
1550   name->increment_refcount();
1551   signature->increment_refcount();
1552 
1553   int cp_length = _imcp_limit;
1554   ClassLoaderData* loader_data = holder->class_loader_data();
1555   constantPoolHandle cp;
1556   {
1557     ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
1558     cp = constantPoolHandle(THREAD, cp_oop);
1559   }
1560   cp->copy_fields(holder->constants());
1561   cp->set_pool_holder(holder);
1562   cp->symbol_at_put(_imcp_invoke_name,       name);
1563   cp->symbol_at_put(_imcp_invoke_signature,  signature);
1564   cp->set_has_preresolution();
1565   cp->set_is_for_method_handle_intrinsic();
1566 
1567   // decide on access bits:  public or not?
1568   int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
1569   bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
1570   if (must_be_static)  flags_bits |= JVM_ACC_STATIC;
1571   assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");
1572 
1573   methodHandle m;
1574   {
1575     InlineTableSizes sizes;
1576     Method* m_oop = Method::allocate(loader_data, 0,
1577                                      accessFlags_from(flags_bits), &sizes,
1578                                      ConstMethod::NORMAL,
1579                                      name,
1580                                      CHECK_(empty));
1581     m = methodHandle(THREAD, m_oop);
1582   }
1583   m->set_constants(cp());
1584   m->set_name_index(_imcp_invoke_name);
1585   m->set_signature_index(_imcp_invoke_signature);

1594   assert(m->intrinsic_id() == iid, "correctly predicted iid");
1595 #endif //ASSERT
1596 
1597   // Finally, set up its entry points.
1598   assert(m->can_be_statically_bound(), "");
1599   m->set_vtable_index(Method::nonvirtual_vtable_index);
1600   m->link_method(m, CHECK_(empty));
1601 
1602   if (iid == vmIntrinsics::_linkToNative) {
1603     m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1604   }
1605   if (log_is_enabled(Debug, methodhandles)) {
1606     LogTarget(Debug, methodhandles) lt;
1607     LogStream ls(lt);
1608     m->print_on(&ls);
1609   }
1610 
1611   return m;
1612 }
1613 
1614 #if INCLUDE_CDS
1615 void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
1616   m->link_method(m, CHECK);
1617 
1618   if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
1619     m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1620   }
1621 }
1622 #endif
1623 
1624 Klass* Method::check_non_bcp_klass(Klass* klass) {
1625   if (klass != nullptr && klass->class_loader() != nullptr) {
1626     if (klass->is_objArray_klass())
1627       klass = ObjArrayKlass::cast(klass)->bottom_klass();
1628     return klass;
1629   }
1630   return nullptr;
1631 }
1632 
1633 
1634 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
1635                                                 u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
1636   // Code below does not work for native methods - they should never get rewritten anyway
1637   assert(!m->is_native(), "cannot rewrite native methods");
1638   // Allocate new Method*
1639   AccessFlags flags = m->access_flags();
1640 
1641   ConstMethod* cm = m->constMethod();
1642   int checked_exceptions_len = cm->checked_exceptions_length();
1643   int localvariable_len = cm->localvariable_table_length();

2059   } else {
2060     return ((mcs != nullptr) ? mcs->invocation_counter()->count() : 0) +
2061            ((mdo != nullptr) ? mdo->invocation_counter()->count() : 0);
2062   }
2063 }
2064 
2065 int Method::backedge_count() const {
2066   MethodCounters* mcs = method_counters();
2067   MethodData* mdo = method_data();
2068   if (((mcs != nullptr) ? mcs->backedge_counter()->carry() : false) ||
2069       ((mdo != nullptr) ? mdo->backedge_counter()->carry() : false)) {
2070     return InvocationCounter::count_limit;
2071   } else {
2072     return ((mcs != nullptr) ? mcs->backedge_counter()->count() : 0) +
2073            ((mdo != nullptr) ? mdo->backedge_counter()->count() : 0);
2074   }
2075 }
2076 
2077 int Method::highest_comp_level() const {
2078   const MethodCounters* mcs = method_counters();
2079   CompiledMethod* nm = code();
2080   int level = (nm != nullptr) ? nm->comp_level() : CompLevel_none;
2081   if (mcs != nullptr) {
2082     return MAX2(mcs->highest_comp_level(), level);
2083   } else {
2084     return CompLevel_none;
2085   }
2086 }
2087 
2088 int Method::highest_osr_comp_level() const {
2089   const MethodCounters* mcs = method_counters();
2090   if (mcs != nullptr) {
2091     return mcs->highest_osr_comp_level();
2092   } else {
2093     return CompLevel_none;
2094   }
2095 }
2096 
2097 void Method::set_highest_comp_level(int level) {
2098   MethodCounters* mcs = method_counters();
2099   if (mcs != nullptr) {
2100     mcs->set_highest_comp_level(level);
2101   }
2102 }
< prev index next >