src/hotspot/share/oops/method.cpp

 166 }
 167 
 168 address Method::get_c2i_no_clinit_check_entry() {
 169   assert(VM_Version::supports_fast_class_init_checks(), "");
 170   assert(adapter() != nullptr, "must have");
 171   return adapter()->get_c2i_no_clinit_check_entry();
 172 }
 173 
 174 char* Method::name_and_sig_as_C_string() const {
 175   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
 176 }
 177 
 178 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
 179   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
 180 }
 181 
 182 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
 183   const char* klass_name = klass->external_name();
 184   int klass_name_len  = (int)strlen(klass_name);
 185   int method_name_len = method_name->utf8_length();
 186   int len             = klass_name_len + 1 + method_name_len + signature->utf8_length();
 187   char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
 188   strcpy(dest, klass_name);
 189   dest[klass_name_len] = '.';
 190   strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
 191   strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
 192   dest[len] = 0;
 193   return dest;
 194 }
 195 
 196 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
 197   Symbol* klass_name = klass->name();
 198   klass_name->as_klass_external_name(buf, size);
 199   int len = (int)strlen(buf);
 200 
 201   if (len < size - 1) {
 202     buf[len++] = '.';
 203 
 204     method_name->as_C_string(&(buf[len]), size - len);
 205     len = (int)strlen(buf);
 206 
 207     signature->as_C_string(&(buf[len]), size - len);
 208   }
 209 
 210   return buf;
 211 }

 371 address Method::bcp_from(address bcp) const {
 372   if (is_native() && bcp == nullptr) {
 373     return code_base();
 374   } else {
 375     return bcp;
 376   }
 377 }
 378 
 379 int Method::size(bool is_native) {
 380   // If native, then include pointers for native_function and signature_handler
 381   int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
 382   int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
 383   return align_metadata_size(header_size() + extra_words);
 384 }
 385 
 386 Symbol* Method::klass_name() const {
 387   return method_holder()->name();
 388 }
 389 
 390 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
 391   log_trace(aot)("Iter(Method): %p", this);
 392 
 393   if (!method_holder()->is_rewritten()) {





 394     it->push(&_constMethod, MetaspaceClosure::_writable);
 395   } else {
 396     it->push(&_constMethod);
 397   }
 398   it->push(&_adapter);
 399   it->push(&_method_data);
 400   it->push(&_method_counters);
 401   NOT_PRODUCT(it->push(&_name);)
 402 }
 403 
 404 #if INCLUDE_CDS
 405 // Attempt to return method to original state.  Clear any pointers
 406 // (to objects outside the shared spaces).  We won't be able to predict
 407 // where they should point in a new JVM.  Further initialize some
 408 // entries now in order to allow them to be write protected later.
 409 
 410 void Method::remove_unshareable_info() {
 411   unlink_method();
 412   if (method_data() != nullptr) {
 413     method_data()->remove_unshareable_info();
 414   }
 415   if (method_counters() != nullptr) {
 416     method_counters()->remove_unshareable_info();
 417   }
 418   if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
 419     _adapter->remove_unshareable_info();
 420     _adapter = nullptr;
 421   }
 422   JFR_ONLY(REMOVE_METHOD_ID(this);)
 423 }
 424 
 425 void Method::restore_unshareable_info(TRAPS) {
 426   assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
 427   if (method_data() != nullptr) {
 428     method_data()->restore_unshareable_info(CHECK);
 429   }
 430   if (method_counters() != nullptr) {
 431     method_counters()->restore_unshareable_info(CHECK);
 432   }
 433   if (_adapter != nullptr) {
 434     assert(_adapter->is_linked(), "must be");
 435     _from_compiled_entry = _adapter->get_c2i_entry();
 436   }
 437   assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");

 438 }
 439 #endif
 440 
 441 void Method::set_vtable_index(int index) {
 442   if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 443     // At runtime initialize_vtable is rerun as part of link_class_impl()
 444     // for a shared class loaded by the non-boot loader to obtain the loader
 445     // constraints based on the runtime classloaders' context.
 446     return; // don't write into the shared class
 447   } else {
 448     _vtable_index = index;
 449   }
 450 }
 451 
 452 void Method::set_itable_index(int index) {
 453   if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 454     // At runtime initialize_itable is rerun as part of link_class_impl()
 455     // for a shared class loaded by the non-boot loader to obtain the loader
 456     // constraints based on the runtime classloaders' context. The dumptime
 457     // itable index should be the same as the runtime index.

 641   // Do not profile the method if metaspace has hit an OOM previously
 642   // allocating profiling data. Callers clear pending exception so don't
 643   // add one here.
 644   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 645     return;
 646   }
 647 
 648   ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
 649   MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
 650   if (HAS_PENDING_EXCEPTION) {
 651     CompileBroker::log_metaspace_failure();
 652     ClassLoaderDataGraph::set_metaspace_oom(true);
 653     return;   // return the exception (which is cleared)
 654   }
 655 
 656   if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
 657     MetadataFactory::free_metadata(loader_data, method_data);
 658     return;
 659   }
 660 
 661   if (PrintMethodData && (Verbose || WizardMode)) {





 662     ResourceMark rm(THREAD);
 663     tty->print("build_profiling_method_data for ");
 664     method->print_name(tty);
 665     tty->cr();
 666     // At the end of the run, the MDO, full of data, will be dumped.
 667   }
 668 }
 669 
 670 MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
 671   // Do not profile the method if metaspace has hit an OOM previously
 672   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 673     return nullptr;
 674   }
 675 
 676   methodHandle mh(current, m);
 677   MethodCounters* counters;
 678   if (current->is_Java_thread()) {
 679     JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
 680     // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
 681     // if needed.
 682     counters = MethodCounters::allocate_with_exception(mh, THREAD);
 683     if (HAS_PENDING_EXCEPTION) {
 684       CLEAR_PENDING_EXCEPTION;
 685     }
 686   } else {
 687     // Call metaspace allocation that doesn't throw exception if the
 688     // current thread isn't a JavaThread, i.e. the VMThread.
 689     counters = MethodCounters::allocate_no_exception(mh);
 690   }
 691 
 692   if (counters == nullptr) {
 693     CompileBroker::log_metaspace_failure();
 694     ClassLoaderDataGraph::set_metaspace_oom(true);
 695     return nullptr;
 696   }
 697 
 698   if (!mh->init_method_counters(counters)) {
 699     MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
 700   }
 701 





 702   return mh->method_counters();
 703 }
 704 
 705 bool Method::init_method_counters(MethodCounters* counters) {
 706   // Try to install a pointer to MethodCounters, return true on success.
 707   return Atomic::replace_if_null(&_method_counters, counters);
 708 }
 709 
 710 void Method::set_exception_handler_entered(int handler_bci) {
 711   if (ProfileExceptionHandlers) {
 712     MethodData* mdo = method_data();
 713     if (mdo != nullptr) {
 714       BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
 715       handler_data.set_exception_handler_entered();
 716     }
 717   }
 718 }
 719 
 720 int Method::extra_stack_words() {
 721   // not an inline function, to avoid a header dependency on Interpreter

 908   return (is_static() ||
 909           method_holder()->major_version() < 51);
 910 }
 911 
 912 bool Method::is_static_initializer() const {
 913   // For classfiles version 51 or greater, ensure that the clinit method is
 914   // static.  Non-static methods with the name "<clinit>" are not static
 915   // initializers. (older classfiles exempted for backward compatibility)
 916   return name() == vmSymbols::class_initializer_name() &&
 917          has_valid_initializer_flags();
 918 }
 919 
 920 bool Method::is_object_initializer() const {
 921   return name() == vmSymbols::object_initializer_name();
 922 }
 923 
 924 bool Method::needs_clinit_barrier() const {
 925   return is_static() && !method_holder()->is_initialized();
 926 }
 927 





 928 bool Method::is_object_wait0() const {
 929   return klass_name() == vmSymbols::java_lang_Object()
 930          && name() == vmSymbols::wait_name();
 931 }
 932 
 933 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
 934   int length = method->checked_exceptions_length();
 935   if (length == 0) {  // common case
 936     return objArrayHandle(THREAD, Universe::the_empty_class_array());
 937   } else {
 938     methodHandle h_this(THREAD, method);
 939     objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
 940     objArrayHandle mirrors (THREAD, m_oop);
 941     for (int i = 0; i < length; i++) {
 942       CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
 943       Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
 944       if (log_is_enabled(Warning, exceptions) &&
 945           !k->is_subclass_of(vmClasses::Throwable_klass())) {
 946         ResourceMark rm(THREAD);
 947         log_warning(exceptions)(

1198   if (!CDSConfig::is_dumping_adapters() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) {
1199     _adapter = nullptr;
1200   }
1201   _i2i_entry = nullptr;
1202   _from_compiled_entry = nullptr;
1203   _from_interpreted_entry = nullptr;
1204 
1205   if (is_native()) {
1206     *native_function_addr() = nullptr;
1207     set_signature_handler(nullptr);
1208   }
1209   NOT_PRODUCT(set_compiled_invocation_count(0);)
1210 
1211   clear_method_data();
1212   clear_method_counters();
1213   clear_is_not_c1_compilable();
1214   clear_is_not_c1_osr_compilable();
1215   clear_is_not_c2_compilable();
1216   clear_is_not_c2_osr_compilable();
1217   clear_queued_for_compilation();

1218 
1219   remove_unshareable_flags();
1220 }
1221 
1222 void Method::remove_unshareable_flags() {
1223   // clear all the flags that shouldn't be in the archived version
1224   assert(!is_old(), "must be");
1225   assert(!is_obsolete(), "must be");
1226   assert(!is_deleted(), "must be");
1227 
1228   set_is_prefixed_native(false);
1229   set_queued_for_compilation(false);

1230   set_is_not_c2_compilable(false);
1231   set_is_not_c1_compilable(false);
1232   set_is_not_c2_osr_compilable(false);
1233   set_on_stack_flag(false);


1234 }
1235 #endif
1236 
 1237 // Called when the method_holder is getting linked. Set up entrypoints so the method
1238 // is ready to be called from interpreter, compiler, and vtables.
1239 void Method::link_method(const methodHandle& h_method, TRAPS) {
1240   if (log_is_enabled(Info, perf, class, link)) {
1241     ClassLoader::perf_ik_link_methods_count()->inc();
1242   }
1243 
1244   // If the code cache is full, we may reenter this function for the
1245   // leftover methods that weren't linked.
1246   if (adapter() != nullptr) {
1247     if (adapter()->is_shared()) {
1248       assert(adapter()->is_linked(), "Adapter is shared but not linked");
1249     } else {
1250       return;
1251     }
1252   }
 1253   assert(_code == nullptr, "nothing compiled yet");

1279   if (_adapter == nullptr) {
1280     (void) make_adapters(h_method, CHECK);
1281     assert(adapter()->is_linked(), "Adapter must have been linked");
1282   }
1283 
 1284   // ONLY USE the h_method now as make_adapters may have blocked
1285 
1286   if (h_method->is_continuation_native_intrinsic()) {
1287     _from_interpreted_entry = nullptr;
1288     _from_compiled_entry = nullptr;
1289     _i2i_entry = nullptr;
1290     if (Continuations::enabled()) {
1291       assert(!Threads::is_vm_complete(), "should only be called during vm init");
1292       AdapterHandlerLibrary::create_native_wrapper(h_method);
1293       if (!h_method->has_compiled_code()) {
1294         THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1295       }
1296       assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1297     }
1298   }





1299 }
1300 
1301 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1302   PerfTraceTime timer(ClassLoader::perf_method_adapters_time());
1303 
1304   // Adapters for compiled code are made eagerly here.  They are fairly
1305   // small (generally < 100 bytes) and quick to make (and cached and shared)
1306   // so making them eagerly shouldn't be too expensive.
1307   AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
 1308   if (adapter == nullptr) {
1309     if (!is_init_completed()) {
1310       // Don't throw exceptions during VM initialization because java.lang.* classes
1311       // might not have been initialized, causing problems when constructing the
1312       // Java exception object.
1313       vm_exit_during_initialization("Out of space in CodeCache for adapters");
1314     } else {
1315       THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
1316     }
1317   }
1318 
1319   mh->set_adapter_entry(adapter);
1320   mh->_from_compiled_entry = adapter->get_c2i_entry();
1321   return adapter->get_c2i_entry();
 1322 }

src/hotspot/share/oops/method.cpp

 166 }
 167 
 168 address Method::get_c2i_no_clinit_check_entry() {
 169   assert(VM_Version::supports_fast_class_init_checks(), "");
 170   assert(adapter() != nullptr, "must have");
 171   return adapter()->get_c2i_no_clinit_check_entry();
 172 }
 173 
 174 char* Method::name_and_sig_as_C_string() const {
 175   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
 176 }
 177 
 178 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
 179   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
 180 }
 181 
 182 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
 183   const char* klass_name = klass->external_name();
 184   int klass_name_len  = (int)strlen(klass_name);
 185   int method_name_len = method_name->utf8_length();
 186   int len             = klass_name_len + 1 + method_name_len + signature->utf8_length();
 187   char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
 188   strcpy(dest, klass_name);
 189   dest[klass_name_len] = '.';
 190   strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
 191   strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
 192   dest[len] = 0;
 193   return dest;
 194 }
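
For reference, the assembled string has the form "java.lang.Object.wait(J)V". A minimal standalone sketch of the same exact-length concatenation, using malloc and plain C strings in place of HotSpot's resource arrays and Symbols (all names here are hypothetical):

    #include <cstdio>
    #include <cstdlib>
    #include <cstring>

    // Builds "<klass>.<name><signature>", e.g. "java.lang.Object.wait(J)V".
    char* name_and_sig(const char* klass_name, const char* method_name, const char* sig) {
      size_t klass_len  = strlen(klass_name);
      size_t method_len = strlen(method_name);
      size_t len        = klass_len + 1 + method_len + strlen(sig); // +1 for the '.'
      char*  dest       = (char*)malloc(len + 1);                   // +1 for the terminator
      strcpy(dest, klass_name);
      dest[klass_len] = '.';                           // overwrite strcpy's terminator
      strcpy(&dest[klass_len + 1], method_name);
      strcpy(&dest[klass_len + 1 + method_len], sig);  // final strcpy writes dest[len] = 0
      return dest;
    }

    int main() {
      char* s = name_and_sig("java.lang.Object", "wait", "(J)V");
      puts(s);   // prints: java.lang.Object.wait(J)V
      free(s);
    }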
 196 
 197 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
 198   Symbol* klass_name = klass->name();
 199   klass_name->as_klass_external_name(buf, size);
 200   int len = (int)strlen(buf);
 201 
 202   if (len < size - 1) {
 203     buf[len++] = '.';
 204 
 205     method_name->as_C_string(&(buf[len]), size - len);
 206     len = (int)strlen(buf);
 207 
 208     signature->as_C_string(&(buf[len]), size - len);
 209   }
 210 
 211   return buf;
 212 }

 372 address Method::bcp_from(address bcp) const {
 373   if (is_native() && bcp == nullptr) {
 374     return code_base();
 375   } else {
 376     return bcp;
 377   }
 378 }
 379 
 380 int Method::size(bool is_native) {
 381   // If native, then include pointers for native_function and signature_handler
 382   int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
 383   int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
 384   return align_metadata_size(header_size() + extra_words);
 385 }
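
A worked example of the arithmetic above, assuming a 64-bit VM where BytesPerWord is 8: a native method reserves two trailing pointers, so extra_bytes is 16 and extra_words is 2; non-native methods add nothing. A compilable sketch (align_up_sz is a stand-in for HotSpot's align_up):

    #include <cassert>
    #include <cstddef>

    // Round x up to the next multiple of a power-of-two alignment.
    static size_t align_up_sz(size_t x, size_t alignment) {
      return (x + alignment - 1) & ~(alignment - 1);
    }

    int main() {
      const size_t BytesPerWord = sizeof(void*);  // assumed 8 (64-bit)
      size_t extra_bytes = 2 * sizeof(void*);     // native_function + signature_handler slots
      size_t extra_words = align_up_sz(extra_bytes, BytesPerWord) / BytesPerWord;
      assert(extra_words == 2);
      return 0;
    }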
 386 
 387 Symbol* Method::klass_name() const {
 388   return method_holder()->name();
 389 }
 390 
 391 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
 392   LogStreamHandle(Trace, aot) lsh;
 393   if (lsh.is_enabled()) {
 394     lsh.print("Iter(Method): %p ", this);
 395     print_external_name(&lsh);
 396     lsh.cr();
 397   }
 398   if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
 399     // holder is null for MH intrinsic methods
 400     it->push(&_constMethod, MetaspaceClosure::_writable);
 401   } else {
 402     it->push(&_constMethod);
 403   }
 404   it->push(&_adapter);
 405   it->push(&_method_data);
 406   it->push(&_method_counters);
 407   NOT_PRODUCT(it->push(&_name);)
 408 }
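
The LogStreamHandle form above replaces the old single log_trace call so the method name can be appended to the same output line; testing is_enabled() once up front skips all the formatting work when the tag is not traced. A hedged sketch of that guard pattern in portable C++ (trace_enabled stands in for the unified-logging level check):

    #include <cstdio>

    struct Obj {
      void print_name(FILE* out) const { fprintf(out, "Foo::bar"); }
    };

    void iterate(const Obj* obj, bool trace_enabled) {
      if (trace_enabled) {                                 // cheap check first
        fprintf(stderr, "Iter(Method): %p ", (const void*)obj);
        obj->print_name(stderr);                           // extra pieces, same line
        fprintf(stderr, "\n");
      }
    }

    int main() {
      Obj o;
      iterate(&o, true);
      return 0;
    }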
 409 
 410 #if INCLUDE_CDS
 411 // Attempt to return method to original state.  Clear any pointers
 412 // (to objects outside the shared spaces).  We won't be able to predict
 413 // where they should point in a new JVM.  Further initialize some
 414 // entries now in order to allow them to be write protected later.
 415 
 416 void Method::remove_unshareable_info() {
 417   unlink_method();
 418   if (method_data() != nullptr) {
 419     method_data()->remove_unshareable_info();
 420   }
 421   if (method_counters() != nullptr) {
 422     method_counters()->remove_unshareable_info();
 423   }
 424   if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
 425     _adapter->remove_unshareable_info();
 426     _adapter = nullptr;
 427   }
 428   JFR_ONLY(REMOVE_METHOD_ID(this);)
 429 }
 436 
 437 void Method::restore_unshareable_info(TRAPS) {
 438   assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
 439   if (method_data() != nullptr) {
 440     method_data()->restore_unshareable_info(CHECK);
 441   }
 442   if (method_counters() != nullptr) {
 443     method_counters()->restore_unshareable_info(CHECK);
 444   }
 445   if (_adapter != nullptr) {
 446     assert(_adapter->is_linked(), "must be");
 447     _from_compiled_entry = _adapter->get_c2i_entry();
 448   }
 449   assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
 450   assert(!pending_queue_processed(), "method's pending_queue_processed flag should not be set");
 451 }
 458 #endif
 459 
 460 void Method::set_vtable_index(int index) {
 461   if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 462     // At runtime initialize_vtable is rerun as part of link_class_impl()
 463     // for a shared class loaded by the non-boot loader to obtain the loader
 464     // constraints based on the runtime classloaders' context.
 465     return; // don't write into the shared class
 466   } else {
 467     _vtable_index = index;
 468   }
 469 }
 470 
 471 void Method::set_itable_index(int index) {
 472   if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 473     // At runtime initialize_itable is rerun as part of link_class_impl()
 474     // for a shared class loaded by the non-boot loader to obtain the loader
 475     // constraints based on the runtime classloaders' context. The dumptime
 476     // itable index should be the same as the runtime index.

 660   // Do not profile the method if metaspace has hit an OOM previously
 661   // allocating profiling data. Callers clear pending exception so don't
 662   // add one here.
 663   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 664     return;
 665   }
 666 
 667   ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
 668   MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
 669   if (HAS_PENDING_EXCEPTION) {
 670     CompileBroker::log_metaspace_failure();
 671     ClassLoaderDataGraph::set_metaspace_oom(true);
 672     return;   // return the exception (which is cleared)
 673   }
 674 
 675   if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
 676     MetadataFactory::free_metadata(loader_data, method_data);
 677     return;
 678   }
 679 
 680   if (ForceProfiling && TrainingData::need_data()) {
 681     MethodTrainingData* mtd = MethodTrainingData::make(method, false);
 682     guarantee(mtd != nullptr, "");
 683   }
 684 
 685   if (PrintMethodData) {
 686     ResourceMark rm(THREAD);
 687     tty->print("build_profiling_method_data for ");
 688     method->print_name(tty);
 689     tty->cr();
 690     // At the end of the run, the MDO, full of data, will be dumped.
 691   }
 692 }
 693 
 694 MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
 695   // Do not profile the method if metaspace has hit an OOM previously
 696   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 697     return nullptr;
 698   }
 699 
 700   methodHandle mh(current, m);
 701   MethodCounters* counters;
 702   if (current->is_Java_thread()) {
 703     JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
 704     // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
 705     // if needed.
 706     counters = MethodCounters::allocate_with_exception(mh, THREAD);
 707     if (HAS_PENDING_EXCEPTION) {
 708       CLEAR_PENDING_EXCEPTION;
 709     }
 710   } else {
 711     // Call metaspace allocation that doesn't throw exception if the
 712     // current thread isn't a JavaThread, i.e. the VMThread.
 713     counters = MethodCounters::allocate_no_exception(mh);
 714   }
 715 
 716   if (counters == nullptr) {
 717     CompileBroker::log_metaspace_failure();
 718     ClassLoaderDataGraph::set_metaspace_oom(true);
 719     return nullptr;
 720   }
 721 
 722   if (!mh->init_method_counters(counters)) {
 723     MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
 724   }
 725 
 726   if (ForceProfiling && TrainingData::need_data()) {
 727     MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
 728     guarantee(mtd != nullptr, "");
 729   }
 730 
 731   return mh->method_counters();
 732 }
 733 
 734 bool Method::init_method_counters(MethodCounters* counters) {
 735   // Try to install a pointer to MethodCounters, return true on success.
 736   return Atomic::replace_if_null(&_method_counters, counters);
 737 }
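
Both installation sites above (the method-data install in build_profiling_method_data and init_method_counters here) rely on the same lock-free idiom: allocate privately, publish with one compare-and-swap against null, and free the private copy if another thread published first. A minimal sketch with std::atomic standing in for HotSpot's Atomic::replace_if_null (types hypothetical):

    #include <atomic>

    struct Counters { int invocations = 0; };
    static std::atomic<Counters*> g_counters{nullptr};

    // Returns true if 'fresh' became the shared copy; the loser frees its own.
    bool install_counters(Counters* fresh) {
      Counters* expected = nullptr;
      if (g_counters.compare_exchange_strong(expected, fresh)) {
        return true;                  // we published it
      }
      delete fresh;                   // another thread won the race
      return false;                   // caller re-reads g_counters, as the code above does
    }

    int main() {
      return install_counters(new Counters()) ? 0 : 1;
    }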
 738 
 739 void Method::set_exception_handler_entered(int handler_bci) {
 740   if (ProfileExceptionHandlers) {
 741     MethodData* mdo = method_data();
 742     if (mdo != nullptr) {
 743       BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
 744       handler_data.set_exception_handler_entered();
 745     }
 746   }
 747 }
 748 
 749 int Method::extra_stack_words() {
 750   // not an inline function, to avoid a header dependency on Interpreter

 937   return (is_static() ||
 938           method_holder()->major_version() < 51);
 939 }
 940 
 941 bool Method::is_static_initializer() const {
 942   // For classfiles version 51 or greater, ensure that the clinit method is
 943   // static.  Non-static methods with the name "<clinit>" are not static
 944   // initializers. (older classfiles exempted for backward compatibility)
 945   return name() == vmSymbols::class_initializer_name() &&
 946          has_valid_initializer_flags();
 947 }
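
Restating the rule: "<clinit>" only counts as a static initializer when it is static or the class file predates major version 51 (Java 7). A small illustrative predicate, with all names hypothetical:

    #include <cstring>

    // Mirrors the name check plus has_valid_initializer_flags(), for illustration.
    bool is_static_initializer(const char* name, bool is_static, int major_version) {
      bool valid_flags = is_static || major_version < 51;  // pre-51 class files exempt
      return std::strcmp(name, "<clinit>") == 0 && valid_flags;
    }

    int main() {
      return is_static_initializer("<clinit>", true, 52) ? 0 : 1;
    }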
 948 
 949 bool Method::is_object_initializer() const {
 950   return name() == vmSymbols::object_initializer_name();
 951 }
 952 
 953 bool Method::needs_clinit_barrier() const {
 954   return is_static() && !method_holder()->is_initialized();
 955 }
 956 
 957 bool Method::code_has_clinit_barriers() const {
 958   nmethod* nm = code();
 959   return (nm != nullptr) && nm->has_clinit_barriers();
 960 }
 961 
 962 bool Method::is_object_wait0() const {
 963   return klass_name() == vmSymbols::java_lang_Object()
 964          && name() == vmSymbols::wait_name();
 965 }
 966 
 967 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
 968   int length = method->checked_exceptions_length();
 969   if (length == 0) {  // common case
 970     return objArrayHandle(THREAD, Universe::the_empty_class_array());
 971   } else {
 972     methodHandle h_this(THREAD, method);
 973     objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
 974     objArrayHandle mirrors (THREAD, m_oop);
 975     for (int i = 0; i < length; i++) {
 976       CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
 977       Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
 978       if (log_is_enabled(Warning, exceptions) &&
 979           !k->is_subclass_of(vmClasses::Throwable_klass())) {
 980         ResourceMark rm(THREAD);
 981         log_warning(exceptions)(

1232   if (!CDSConfig::is_dumping_adapters() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) {
1233     _adapter = nullptr;
1234   }
1235   _i2i_entry = nullptr;
1236   _from_compiled_entry = nullptr;
1237   _from_interpreted_entry = nullptr;
1238 
1239   if (is_native()) {
1240     *native_function_addr() = nullptr;
1241     set_signature_handler(nullptr);
1242   }
1243   NOT_PRODUCT(set_compiled_invocation_count(0);)
1244 
1245   clear_method_data();
1246   clear_method_counters();
1247   clear_is_not_c1_compilable();
1248   clear_is_not_c1_osr_compilable();
1249   clear_is_not_c2_compilable();
1250   clear_is_not_c2_osr_compilable();
1251   clear_queued_for_compilation();
1252   set_pending_queue_processed(false);
1253 
1254   remove_unshareable_flags();
1255 }
1256 
1257 void Method::remove_unshareable_flags() {
1258   // clear all the flags that shouldn't be in the archived version
1259   assert(!is_old(), "must be");
1260   assert(!is_obsolete(), "must be");
1261   assert(!is_deleted(), "must be");
1262 
1263   set_is_prefixed_native(false);
1264   set_queued_for_compilation(false);
1265   set_pending_queue_processed(false);
1266   set_is_not_c2_compilable(false);
1267   set_is_not_c1_compilable(false);
1268   set_is_not_c2_osr_compilable(false);
1269   set_on_stack_flag(false);
1270   set_has_upcall_on_method_entry(false);
1271   set_has_upcall_on_method_exit(false);
1272 }
1273 #endif
1274 
 1275 // Called when the method_holder is getting linked. Set up entrypoints so the method
1276 // is ready to be called from interpreter, compiler, and vtables.
1277 void Method::link_method(const methodHandle& h_method, TRAPS) {
1278   if (log_is_enabled(Info, perf, class, link)) {
1279     ClassLoader::perf_ik_link_methods_count()->inc();
1280   }
1281 
1282   // If the code cache is full, we may reenter this function for the
1283   // leftover methods that weren't linked.
1284   if (adapter() != nullptr) {
1285     if (adapter()->is_shared()) {
1286       assert(adapter()->is_linked(), "Adapter is shared but not linked");
1287     } else {
1288       return;
1289     }
1290   }
 1291   assert(_code == nullptr, "nothing compiled yet");

1317   if (_adapter == nullptr) {
1318     (void) make_adapters(h_method, CHECK);
1319     assert(adapter()->is_linked(), "Adapter must have been linked");
1320   }
1321 
 1322   // ONLY USE the h_method now as make_adapters may have blocked
1323 
1324   if (h_method->is_continuation_native_intrinsic()) {
1325     _from_interpreted_entry = nullptr;
1326     _from_compiled_entry = nullptr;
1327     _i2i_entry = nullptr;
1328     if (Continuations::enabled()) {
1329       assert(!Threads::is_vm_complete(), "should only be called during vm init");
1330       AdapterHandlerLibrary::create_native_wrapper(h_method);
1331       if (!h_method->has_compiled_code()) {
1332         THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1333       }
1334       assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1335     }
1336   }
1337   if (_preload_code != nullptr) {
1338     MutexLocker ml(NMethodState_lock, Mutex::_no_safepoint_check_flag);
1339     set_code(h_method, _preload_code);
1340     assert(((nmethod*)_preload_code)->aot_code_entry() == _aot_code_entry, "sanity");
1341   }
1342 }
1343 
1344 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1345   PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());
1346 
1347   // Adapters for compiled code are made eagerly here.  They are fairly
1348   // small (generally < 100 bytes) and quick to make (and cached and shared)
1349   // so making them eagerly shouldn't be too expensive.
1350   AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
 1351   if (adapter == nullptr) {
1352     if (!is_init_completed()) {
1353       // Don't throw exceptions during VM initialization because java.lang.* classes
1354       // might not have been initialized, causing problems when constructing the
1355       // Java exception object.
1356       vm_exit_during_initialization("Out of space in CodeCache for adapters");
1357     } else {
1358       THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
1359     }
1360   }
1361 
1362   mh->set_adapter_entry(adapter);
1363   mh->_from_compiled_entry = adapter->get_c2i_entry();
1364   return adapter->get_c2i_entry();
1365 }