< prev index next >

src/hotspot/share/oops/method.cpp

Print this page

 179   if (is_abstract()) {
 180     return nullptr;
 181   }
 182   assert(VM_Version::supports_fast_class_init_checks(), "");
 183   assert(adapter() != nullptr, "must have");
 184   return adapter()->get_c2i_no_clinit_check_entry();
 185 }
 186 
 187 char* Method::name_and_sig_as_C_string() const {
 188   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
 189 }
 190 
 191 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
 192   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
 193 }
 194 
 195 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
 196   const char* klass_name = klass->external_name();
 197   int klass_name_len  = (int)strlen(klass_name);
 198   int method_name_len = method_name->utf8_length();
 199   int len             = klass_name_len + 1 + method_name_len + signature->utf8_length();
 200   char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
 201   strcpy(dest, klass_name);
 202   dest[klass_name_len] = '.';
 203   strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
 204   strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());

 205   dest[len] = 0;
 206   return dest;
 207 }
 208 
// Formats "<external class name>.<method name><signature>" into the
// caller-supplied buffer 'buf' of capacity 'size'.  Output is silently
// truncated when the buffer is too small; the Symbol conversion helpers
// NUL-terminate whatever fits.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  // Append the separator and the rest only if at least one more character
  // fits ahead of the terminator.
  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}

 384 address Method::bcp_from(address bcp) const {
 385   if (is_native() && bcp == nullptr) {
 386     return code_base();
 387   } else {
 388     return bcp;
 389   }
 390 }
 391 
// Size of a Method in metadata words.  Native methods carry two extra
// trailing pointers (native_function and signature_handler).
int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}
 398 
// Name of the class that declares this method.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
 402 
 403 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
 404   log_trace(aot)("Iter(Method): %p", this);
 405 
 406   if (!method_holder()->is_rewritten()) {





 407     it->push(&_constMethod, MetaspaceClosure::_writable);
 408   } else {
 409     it->push(&_constMethod);
 410   }
 411   it->push(&_adapter);
 412   it->push(&_method_data);
 413   it->push(&_method_counters);
 414   NOT_PRODUCT(it->push(&_name);)
 415 }
 416 
 417 #if INCLUDE_CDS
 418 // Attempt to return method to original state.  Clear any pointers
 419 // (to objects outside the shared spaces).  We won't be able to predict
 420 // where they should point in a new JVM.  Further initialize some
 421 // entries now in order allow them to be write protected later.
 422 
// Strips runtime-only state before this method is archived; see the comment
// block above — pointers outside the shared spaces cannot be predicted in a
// new JVM.
void Method::remove_unshareable_info() {
  unlink_method();
  if (method_data() != nullptr) {
    method_data()->remove_unshareable_info();
  }
  if (method_counters() != nullptr) {
    method_counters()->remove_unshareable_info();
  }
  // Adapters are only processed here when adapter dumping is enabled;
  // otherwise unlink_method() has already cleared _adapter.
  if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
    _adapter->remove_unshareable_info();
    _adapter = nullptr;
  }
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}
 437 
// Re-initializes state stripped by remove_unshareable_info() when this
// method is materialized from the archive.  May throw (CHECK) while
// restoring profiling data.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  // Archived adapters were linked at dump time; re-derive the
  // compiled-code entry from the adapter.
  if (_adapter != nullptr) {
    assert(_adapter->is_linked(), "must be");
    _from_compiled_entry = _adapter->get_c2i_entry();
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
 452 #endif
 453 
 454 void Method::set_vtable_index(int index) {
 455   if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 456     // At runtime initialize_vtable is rerun as part of link_class_impl()
 457     // for a shared class loaded by the non-boot loader to obtain the loader
 458     // constraints based on the runtime classloaders' context.
 459     return; // don't write into the shared class
 460   } else {
 461     _vtable_index = index;
 462   }
 463 }
 464 
 465 void Method::set_itable_index(int index) {
 466   if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 467     // At runtime initialize_itable is rerun as part of link_class_impl()
 468     // for a shared class loaded by the non-boot loader to obtain the loader
 469     // constraints based on the runtime classloaders' context. The dumptime
 470     // itable index should be the same as the runtime index.

 654   // Do not profile the method if metaspace has hit an OOM previously
 655   // allocating profiling data. Callers clear pending exception so don't
 656   // add one here.
 657   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 658     return;
 659   }
 660 
 661   ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
 662   MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
 663   if (HAS_PENDING_EXCEPTION) {
 664     CompileBroker::log_metaspace_failure();
 665     ClassLoaderDataGraph::set_metaspace_oom(true);
 666     return;   // return the exception (which is cleared)
 667   }
 668 
 669   if (!AtomicAccess::replace_if_null(&method->_method_data, method_data)) {
 670     MetadataFactory::free_metadata(loader_data, method_data);
 671     return;
 672   }
 673 
 674   if (PrintMethodData && (Verbose || WizardMode)) {





 675     ResourceMark rm(THREAD);
 676     tty->print("build_profiling_method_data for ");
 677     method->print_name(tty);
 678     tty->cr();
 679     // At the end of the run, the MDO, full of data, will be dumped.
 680   }
 681 }
 682 
// Lazily allocates and installs a MethodCounters for 'm'.  Safe to call from
// non-Java threads; never leaves a pending exception.  Returns the installed
// counters (possibly another thread's) or null on allocation failure.
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  // Another thread may have installed counters first; free the losing copy.
  if (!mh->init_method_counters(counters)) {
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  return mh->method_counters();
}
 717 
// Atomically installs 'counters'; returns false if another thread won the
// race (the caller is then responsible for freeing its copy).
bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  return AtomicAccess::replace_if_null(&_method_counters, counters);
}
 722 
 723 void Method::set_exception_handler_entered(int handler_bci) {
 724   if (ProfileExceptionHandlers) {
 725     MethodData* mdo = method_data();
 726     if (mdo != nullptr) {
 727       BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
 728       handler_data.set_exception_handler_entered();
 729     }
 730   }
 731 }
 732 
 733 int Method::extra_stack_words() {
 734   // not an inline function, to avoid a header dependency on Interpreter

1211   if (!CDSConfig::is_dumping_adapters()) {
1212     _adapter = nullptr;
1213   }
1214   _i2i_entry = nullptr;
1215   _from_compiled_entry = nullptr;
1216   _from_interpreted_entry = nullptr;
1217 
1218   if (is_native()) {
1219     *native_function_addr() = nullptr;
1220     set_signature_handler(nullptr);
1221   }
1222   NOT_PRODUCT(set_compiled_invocation_count(0);)
1223 
1224   clear_method_data();
1225   clear_method_counters();
1226   clear_is_not_c1_compilable();
1227   clear_is_not_c1_osr_compilable();
1228   clear_is_not_c2_compilable();
1229   clear_is_not_c2_osr_compilable();
1230   clear_queued_for_compilation();

1231 
1232   remove_unshareable_flags();
1233 }
1234 
// Resets transient method flags so the archived copy starts from a clean
// state at runtime.
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  // NOTE(review): is_not_c1_osr_compilable is not cleared here — confirm
  // this is intentional (unlink_method clears it separately).
  set_on_stack_flag(false);
}
1248 #endif
1249 
1250 // Called when the method_holder is getting linked. Setup entrypoints so the method
1251 // is ready to be called from interpreter, compiler, and vtables.
1252 void Method::link_method(const methodHandle& h_method, TRAPS) {
1253   if (log_is_enabled(Info, perf, class, link)) {
1254     ClassLoader::perf_ik_link_methods_count()->inc();
1255   }
1256 
1257   // If the code cache is full, we may reenter this function for the
1258   // leftover methods that weren't linked.
1259   if (adapter() != nullptr) {
1260     if (adapter()->in_aot_cache()) {
1261       assert(adapter()->is_linked(), "Adapter is shared but not linked");
1262     } else {
1263       return;
1264     }
1265   }
1266   assert( _code == nullptr, "nothing compiled yet" );

1301 
1302   // ONLY USE the h_method now as make_adapter may have blocked
1303 
1304   if (h_method->is_continuation_native_intrinsic()) {
1305     _from_interpreted_entry = nullptr;
1306     _from_compiled_entry = nullptr;
1307     _i2i_entry = nullptr;
1308     if (Continuations::enabled()) {
1309       assert(!Threads::is_vm_complete(), "should only be called during vm init");
1310       AdapterHandlerLibrary::create_native_wrapper(h_method);
1311       if (!h_method->has_compiled_code()) {
1312         THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1313       }
1314       assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1315     }
1316   }
1317 }
1318 
// Eagerly creates (or fetches the cached) i2c/c2i adapters for 'mh',
// installs them on the method, and returns the c2i entry.  Exits the VM or
// throws OutOfMemoryError if the code cache has no room.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  assert(!mh->is_abstract(), "abstract methods do not have adapters");
  PerfTraceTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here.  They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  return adapter->get_c2i_entry();
}
1341 

 179   if (is_abstract()) {
 180     return nullptr;
 181   }
 182   assert(VM_Version::supports_fast_class_init_checks(), "");
 183   assert(adapter() != nullptr, "must have");
 184   return adapter()->get_c2i_no_clinit_check_entry();
 185 }
 186 
// Returns "<external class name>.<name><signature>" in a resource-allocated
// buffer.
char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}
 190 
// As above, but writes into the caller-supplied buffer of 'size' bytes.
char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}
 194 
 195 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
 196   const char* klass_name = klass->external_name();
 197   int klass_name_len  = (int)strlen(klass_name);
 198   int method_name_len = method_name->utf8_length();
 199   int len             = klass_name_len + 2 + method_name_len + signature->utf8_length();
 200   char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
 201   strcpy(dest, klass_name);
 202   dest[klass_name_len + 0] = ':';
 203   dest[klass_name_len + 1] = ':';
 204   strcpy(&dest[klass_name_len + 2], method_name->as_C_string());
 205   strcpy(&dest[klass_name_len + 2 + method_name_len], signature->as_C_string());
 206   dest[len] = 0;
 207   return dest;
 208 }
 209 
// Formats "<external class name>.<method name><signature>" into the
// caller-supplied buffer 'buf' of capacity 'size'.  Output is silently
// truncated when the buffer is too small; the Symbol conversion helpers
// NUL-terminate whatever fits.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  // Append the separator and the rest only if at least one more character
  // fits ahead of the terminator.
  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}

// Maps a null bcp to code_base() for native methods (which have no
// bytecodes); any other bcp is returned unchanged.
address Method::bcp_from(address bcp) const {
  if (is_native() && bcp == nullptr) {
    return code_base();
  } else {
    return bcp;
  }
}
 392 
// Size of a Method in metadata words.  Native methods carry two extra
// trailing pointers (native_function and signature_handler).
int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}
 399 
// Name of the class that declares this method.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
 403 
// Visits every metaspace pointer embedded in this Method so the AOT/CDS
// archiver can relocate it.
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  LogStreamHandle(Trace, aot) lsh;
  if (lsh.is_enabled()) {
    lsh.print("Iter(Method): %p ", this);
    print_external_name(&lsh);
    lsh.cr();
  }
  // Not-yet-rewritten holders keep their constMethod writable in the archive.
  if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
    // holder is null for MH intrinsic methods
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
 422 
 423 #if INCLUDE_CDS
 424 // Attempt to return method to original state.  Clear any pointers
 425 // (to objects outside the shared spaces).  We won't be able to predict
 426 // where they should point in a new JVM.  Further initialize some
 427 // entries now in order allow them to be write protected later.
 428 
 429 void Method::remove_unshareable_info() {
 430   unlink_method();
 431   if (method_data() != nullptr) {
 432     method_data()->remove_unshareable_info();
 433   }
 434   if (method_counters() != nullptr) {
 435     method_counters()->remove_unshareable_info();
 436   }
 437   if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
 438     _adapter->remove_unshareable_info();
 439     _adapter = nullptr;
 440   }
 441   if (method_data() != nullptr) {
 442     method_data()->remove_unshareable_info();
 443   }
 444   if (method_counters() != nullptr) {
 445     method_counters()->remove_unshareable_info();
 446   }
 447   JFR_ONLY(REMOVE_METHOD_ID(this);)
 448 }
 449 
 450 void Method::restore_unshareable_info(TRAPS) {
 451   assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
 452   if (method_data() != nullptr) {
 453     method_data()->restore_unshareable_info(CHECK);
 454   }
 455   if (method_counters() != nullptr) {
 456     method_counters()->restore_unshareable_info(CHECK);
 457   }
 458   if (_adapter != nullptr) {
 459     assert(_adapter->is_linked(), "must be");
 460     _from_compiled_entry = _adapter->get_c2i_entry();
 461   }
 462   if (method_data() != nullptr) {
 463     method_data()->restore_unshareable_info(CHECK);
 464   }
 465   if (method_counters() != nullptr) {
 466     method_counters()->restore_unshareable_info(CHECK);
 467   }
 468   assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
 469   assert(!pending_queue_processed(), "method's pending_queued_processed flag should not be set");
 470 }
 471 #endif
 472 
// Records the vtable index, except for methods of AOT-mapped classes that
// were verified at dump time: their pages must stay untouched.
void Method::set_vtable_index(int index) {
  if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}
 483 
 484 void Method::set_itable_index(int index) {
 485   if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
 486     // At runtime initialize_itable is rerun as part of link_class_impl()
 487     // for a shared class loaded by the non-boot loader to obtain the loader
 488     // constraints based on the runtime classloaders' context. The dumptime
 489     // itable index should be the same as the runtime index.

 673   // Do not profile the method if metaspace has hit an OOM previously
 674   // allocating profiling data. Callers clear pending exception so don't
 675   // add one here.
 676   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 677     return;
 678   }
 679 
 680   ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
 681   MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
 682   if (HAS_PENDING_EXCEPTION) {
 683     CompileBroker::log_metaspace_failure();
 684     ClassLoaderDataGraph::set_metaspace_oom(true);
 685     return;   // return the exception (which is cleared)
 686   }
 687 
 688   if (!AtomicAccess::replace_if_null(&method->_method_data, method_data)) {
 689     MetadataFactory::free_metadata(loader_data, method_data);
 690     return;
 691   }
 692 
 693   if (ForceProfiling && TrainingData::need_data()) {
 694     MethodTrainingData* mtd = MethodTrainingData::make(method, false);
 695     guarantee(mtd != nullptr, "");
 696   }
 697 
 698   if (PrintMethodData) {
 699     ResourceMark rm(THREAD);
 700     tty->print("build_profiling_method_data for ");
 701     method->print_name(tty);
 702     tty->cr();
 703     // At the end of the run, the MDO, full of data, will be dumped.
 704   }
 705 }
 706 
// Lazily allocates and installs a MethodCounters for 'm'.  Safe to call from
// non-Java threads; never leaves a pending exception.  Returns the installed
// counters (possibly another thread's) or null on allocation failure.
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  // Another thread may have installed counters first; free the losing copy.
  if (!mh->init_method_counters(counters)) {
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  // Eagerly create training data when forced profiling is collecting it.
  if (ForceProfiling && TrainingData::need_data()) {
    MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
    guarantee(mtd != nullptr, "");
  }

  return mh->method_counters();
}
 746 
// Atomically installs 'counters'; returns false if another thread won the
// race (the caller is then responsible for freeing its copy).
bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  return AtomicAccess::replace_if_null(&_method_counters, counters);
}
 751 
// Marks the exception handler at 'handler_bci' as having been entered, for
// profiling.  No-op unless ProfileExceptionHandlers is on and an MDO exists.
void Method::set_exception_handler_entered(int handler_bci) {
  if (ProfileExceptionHandlers) {
    MethodData* mdo = method_data();
    if (mdo != nullptr) {
      BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
      handler_data.set_exception_handler_entered();
    }
  }
}
 761 
 762 int Method::extra_stack_words() {
 763   // not an inline function, to avoid a header dependency on Interpreter

1240   if (!CDSConfig::is_dumping_adapters()) {
1241     _adapter = nullptr;
1242   }
1243   _i2i_entry = nullptr;
1244   _from_compiled_entry = nullptr;
1245   _from_interpreted_entry = nullptr;
1246 
1247   if (is_native()) {
1248     *native_function_addr() = nullptr;
1249     set_signature_handler(nullptr);
1250   }
1251   NOT_PRODUCT(set_compiled_invocation_count(0);)
1252 
1253   clear_method_data();
1254   clear_method_counters();
1255   clear_is_not_c1_compilable();
1256   clear_is_not_c1_osr_compilable();
1257   clear_is_not_c2_compilable();
1258   clear_is_not_c2_osr_compilable();
1259   clear_queued_for_compilation();
1260   set_pending_queue_processed(false);
1261 
1262   remove_unshareable_flags();
1263 }
1264 
// Resets transient method flags so the archived copy starts from a clean
// state at runtime.
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_pending_queue_processed(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  // NOTE(review): is_not_c1_osr_compilable is not cleared here — confirm
  // this is intentional (unlink_method clears it separately).
  set_on_stack_flag(false);
  set_has_upcall_on_method_entry(false);
  set_has_upcall_on_method_exit(false);
}
1281 #endif
1282 
1283 // Called when the method_holder is getting linked. Setup entrypoints so the method
1284 // is ready to be called from interpreter, compiler, and vtables.
1285 void Method::link_method(const methodHandle& h_method, TRAPS) {
1286   if (log_is_enabled(Info, perf, class, link)) {
1287     ClassLoader::perf_ik_link_methods_count()->inc();
1288   }
1289 
1290   // If the code cache is full, we may reenter this function for the
1291   // leftover methods that weren't linked.
1292   if (adapter() != nullptr) {
1293     if (adapter()->in_aot_cache()) {
1294       assert(adapter()->is_linked(), "Adapter is shared but not linked");
1295     } else {
1296       return;
1297     }
1298   }
1299   assert( _code == nullptr, "nothing compiled yet" );

1334 
1335   // ONLY USE the h_method now as make_adapter may have blocked
1336 
1337   if (h_method->is_continuation_native_intrinsic()) {
1338     _from_interpreted_entry = nullptr;
1339     _from_compiled_entry = nullptr;
1340     _i2i_entry = nullptr;
1341     if (Continuations::enabled()) {
1342       assert(!Threads::is_vm_complete(), "should only be called during vm init");
1343       AdapterHandlerLibrary::create_native_wrapper(h_method);
1344       if (!h_method->has_compiled_code()) {
1345         THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1346       }
1347       assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1348     }
1349   }
1350 }
1351 
// Eagerly creates (or fetches the cached) i2c/c2i adapters for 'mh',
// installs them on the method, and returns the c2i entry.  Exits the VM or
// throws OutOfMemoryError if the code cache has no room.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  assert(!mh->is_abstract(), "abstract methods do not have adapters");
  PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here.  They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  return adapter->get_c2i_entry();
}
1374 
< prev index next >