100 sizes,
101 method_type,
102 CHECK_NULL);
103 int size = Method::size(access_flags.is_native());
104 return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, name);
105 }
106
// Lightweight constructor: wires this Method to its ConstMethod and resets
// all runtime state (entry points, profiling data, vtable index) to the
// "not yet linked" state. Runs under a NoSafepointVerifier because a
// partially-initialized Method must not be observed across a safepoint.
Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
  NoSafepointVerifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  set_intrinsic_id(vmIntrinsics::_none);
  clear_method_data();
  clear_method_counters();
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(nullptr); // sets i2i entry and from_int
  set_adapter_entry(nullptr);
  Method::clear_code(); // from_c/from_i get set to c2i/i2i

  // Native methods additionally carry a native-function pointer and a
  // signature handler; start both out empty.
  if (access_flags.is_native()) {
    clear_native_function();
    set_signature_handler(nullptr);
  }

  NOT_PRODUCT(set_compiled_invocation_count(0);)
  // Name is very useful for debugging.
  NOT_PRODUCT(_name = name;)
}
130
131 // Release Method*. The nmethod will be gone when we get here because
132 // we've walked the code cache.
// Free the metadata owned by this Method (ConstMethod, MethodData,
// MethodCounters) back to the loader's metaspace and null out the links.
// Precondition: no nmethod still references this Method (callers have
// already walked the code cache).
void Method::deallocate_contents(ClassLoaderData* loader_data) {
  MetadataFactory::free_metadata(loader_data, constMethod());
  set_constMethod(nullptr);
  MetadataFactory::free_metadata(loader_data, method_data());
  clear_method_data();
  MetadataFactory::free_metadata(loader_data, method_counters());
  clear_method_counters();
  set_adapter_entry(nullptr);
  // The nmethod will be gone when we get here.
  if (code() != nullptr) _code = nullptr;
}
144
145 void Method::release_C_heap_structures() {
146 if (method_data()) {
147 method_data()->release_C_heap_structures();
148
149 // Destroy MethodData embedded lock
150 method_data()->~MethodData();
151 }
152 }
153
154 address Method::get_i2c_entry() {
155 if (is_abstract()) {
156 return SharedRuntime::throw_AbstractMethodError_entry();
157 }
158 assert(adapter() != nullptr, "must have");
159 return adapter()->get_i2c_entry();
160 }
161
address Method::get_c2i_entry() {
  // Abstract methods have no adapter; callers must handle a null entry.
  if (is_abstract()) {
    return nullptr;
  }
  // NOTE(review): this body asserts fast class-init-check support and returns
  // the *no-clinit-check* c2i entry; the visible text is spliced across a
  // chunk gap, so confirm the function name matches the intended entry point.
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}
186
// Resource-allocated "<holder>.<name><signature>" string for this method.
char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}
190
// Same as above but writes into a caller-supplied buffer of 'size' bytes.
char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}
194
195 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
196 const char* klass_name = klass->external_name();
197 int klass_name_len = (int)strlen(klass_name);
198 int method_name_len = method_name->utf8_length();
199 int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
200 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
201 strcpy(dest, klass_name);
202 dest[klass_name_len] = '.';
203 strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
204 strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
205 dest[len] = 0;
206 return dest;
207 }
208
// Variant that writes into a caller-supplied buffer of 'size' bytes.
// Truncates rather than overflows: if the class name already fills the
// buffer, the '.', method name and signature are dropped entirely.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  // Append only if there is room for at least '.' plus the terminating NUL.
  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
384 address Method::bcp_from(address bcp) const {
385 if (is_native() && bcp == nullptr) {
386 return code_base();
387 } else {
388 return bcp;
389 }
390 }
391
392 int Method::size(bool is_native) {
393 // If native, then include pointers for native_function and signature_handler
394 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
395 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
396 return align_metadata_size(header_size() + extra_words);
397 }
398
// Name of the class that declares this method.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
402
403 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
404 log_trace(aot)("Iter(Method): %p", this);
405
406 if (!method_holder()->is_rewritten()) {
407 it->push(&_constMethod, MetaspaceClosure::_writable);
408 } else {
409 it->push(&_constMethod);
410 }
411 it->push(&_adapter);
412 it->push(&_method_data);
413 it->push(&_method_counters);
414 NOT_PRODUCT(it->push(&_name);)
415 }
416
417 #if INCLUDE_CDS
418 // Attempt to return method to original state. Clear any pointers
419 // (to objects outside the shared spaces). We won't be able to predict
420 // where they should point in a new JVM. Further initialize some
421 // entries now in order allow them to be write protected later.
422
// Strip runtime-only state before archiving this Method (CDS dump time).
void Method::remove_unshareable_info() {
  unlink_method();
  if (method_data() != nullptr) {
    method_data()->remove_unshareable_info();
  }
  if (method_counters() != nullptr) {
    method_counters()->remove_unshareable_info();
  }
  // Adapters are only archived when adapter dumping is enabled; the pointer
  // is dropped here after its own unshareable state has been stripped.
  if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
    _adapter->remove_unshareable_info();
    _adapter = nullptr;
  }
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}
437
// Re-establish runtime state on a Method loaded from the CDS archive.
// CHECK propagates any exception raised by the sub-restores.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  // An archived, already-linked adapter supplies the compiled-call entry.
  if (_adapter != nullptr) {
    assert(_adapter->is_linked(), "must be");
    _from_compiled_entry = _adapter->get_c2i_entry();
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
452 #endif
453
454 void Method::set_vtable_index(int index) {
455 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
456 // At runtime initialize_vtable is rerun as part of link_class_impl()
457 // for a shared class loaded by the non-boot loader to obtain the loader
458 // constraints based on the runtime classloaders' context.
459 return; // don't write into the shared class
460 } else {
461 _vtable_index = index;
462 }
463 }
464
465 void Method::set_itable_index(int index) {
466 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
467 // At runtime initialize_itable is rerun as part of link_class_impl()
468 // for a shared class loaded by the non-boot loader to obtain the loader
469 // constraints based on the runtime classloaders' context. The dumptime
470 // itable index should be the same as the runtime index.
654 // Do not profile the method if metaspace has hit an OOM previously
655 // allocating profiling data. Callers clear pending exception so don't
656 // add one here.
657 if (ClassLoaderDataGraph::has_metaspace_oom()) {
658 return;
659 }
660
661 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
662 MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
663 if (HAS_PENDING_EXCEPTION) {
664 CompileBroker::log_metaspace_failure();
665 ClassLoaderDataGraph::set_metaspace_oom(true);
666 return; // return the exception (which is cleared)
667 }
668
669 if (!AtomicAccess::replace_if_null(&method->_method_data, method_data)) {
670 MetadataFactory::free_metadata(loader_data, method_data);
671 return;
672 }
673
674 if (PrintMethodData && (Verbose || WizardMode)) {
675 ResourceMark rm(THREAD);
676 tty->print("build_profiling_method_data for ");
677 method->print_name(tty);
678 tty->cr();
679 // At the end of the run, the MDO, full of data, will be dumped.
680 }
681 }
682
// Allocate and install MethodCounters for 'm', returning whichever counters
// end up installed (possibly another thread's). Returns null on metaspace
// exhaustion; the OOM is then logged and remembered globally so future
// profiling attempts are skipped.
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      // Allocation failure is handled below via the null check; don't
      // propagate the exception.
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  // If another thread installed counters first, free ours and use theirs.
  if (!mh->init_method_counters(counters)) {
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  return mh->method_counters();
}
717
// Atomically install 'counters'; returns false if another thread won the race.
bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  return AtomicAccess::replace_if_null(&_method_counters, counters);
}
722
723 void Method::set_exception_handler_entered(int handler_bci) {
724 if (ProfileExceptionHandlers) {
725 MethodData* mdo = method_data();
726 if (mdo != nullptr) {
727 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
728 handler_data.set_exception_handler_entered();
729 }
730 }
731 }
732
733 int Method::extra_stack_words() {
734 // not an inline function, to avoid a header dependency on Interpreter
921 return (is_static() ||
922 method_holder()->major_version() < 51);
923 }
924
925 bool Method::is_static_initializer() const {
926 // For classfiles version 51 or greater, ensure that the clinit method is
927 // static. Non-static methods with the name "<clinit>" are not static
928 // initializers. (older classfiles exempted for backward compatibility)
929 return name() == vmSymbols::class_initializer_name() &&
930 has_valid_initializer_flags();
931 }
932
// True iff this method is a constructor ("<init>").
bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}
936
// Static methods need a class-initialization barrier until their holder is
// fully initialized.
bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}
940
// Identifies java.lang.Object.wait0 by holder and name.
bool Method::is_object_wait0() const {
  return klass_name() == vmSymbols::java_lang_Object()
         && name() == vmSymbols::wait_name();
}
945
946 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
947 int length = method->checked_exceptions_length();
948 if (length == 0) { // common case
949 return objArrayHandle(THREAD, Universe::the_empty_class_array());
950 } else {
951 methodHandle h_this(THREAD, method);
952 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
953 objArrayHandle mirrors (THREAD, m_oop);
954 for (int i = 0; i < length; i++) {
955 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
956 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
957 if (log_is_enabled(Warning, exceptions) &&
958 !k->is_subclass_of(vmClasses::Throwable_klass())) {
959 ResourceMark rm(THREAD);
960 log_warning(exceptions)(
1211 if (!CDSConfig::is_dumping_adapters()) {
1212 _adapter = nullptr;
1213 }
1214 _i2i_entry = nullptr;
1215 _from_compiled_entry = nullptr;
1216 _from_interpreted_entry = nullptr;
1217
1218 if (is_native()) {
1219 *native_function_addr() = nullptr;
1220 set_signature_handler(nullptr);
1221 }
1222 NOT_PRODUCT(set_compiled_invocation_count(0);)
1223
1224 clear_method_data();
1225 clear_method_counters();
1226 clear_is_not_c1_compilable();
1227 clear_is_not_c1_osr_compilable();
1228 clear_is_not_c2_compilable();
1229 clear_is_not_c2_osr_compilable();
1230 clear_queued_for_compilation();
1231
1232 remove_unshareable_flags();
1233 }
1234
// Clear status flags that must not appear in the archived copy of this
// Method. Redefinition-related states must already be clear.
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
}
1248 #endif
1249
1250 // Called when the method_holder is getting linked. Setup entrypoints so the method
1251 // is ready to be called from interpreter, compiler, and vtables.
1252 void Method::link_method(const methodHandle& h_method, TRAPS) {
1253 if (log_is_enabled(Info, perf, class, link)) {
1254 ClassLoader::perf_ik_link_methods_count()->inc();
1255 }
1256
1257 // If the code cache is full, we may reenter this function for the
1258 // leftover methods that weren't linked.
1259 if (adapter() != nullptr) {
1260 if (adapter()->in_aot_cache()) {
1261 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1262 } else {
1263 return;
1264 }
1265 }
1266 assert( _code == nullptr, "nothing compiled yet" );
1297 assert(adapter()->is_linked(), "Adapter must have been linked");
1298 #endif
1299 h_method->_from_compiled_entry = adapter()->get_c2i_entry();
1300 }
1301
1302 // ONLY USE the h_method now as make_adapter may have blocked
1303
1304 if (h_method->is_continuation_native_intrinsic()) {
1305 _from_interpreted_entry = nullptr;
1306 _from_compiled_entry = nullptr;
1307 _i2i_entry = nullptr;
1308 if (Continuations::enabled()) {
1309 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1310 AdapterHandlerLibrary::create_native_wrapper(h_method);
1311 if (!h_method->has_compiled_code()) {
1312 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1313 }
1314 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1315 }
1316 }
1317 }
1318
1319 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1320 assert(!mh->is_abstract(), "abstract methods do not have adapters");
1321 PerfTraceTime timer(ClassLoader::perf_method_adapters_time());
1322
1323 // Adapters for compiled code are made eagerly here. They are fairly
1324 // small (generally < 100 bytes) and quick to make (and cached and shared)
1325 // so making them eagerly shouldn't be too expensive.
1326 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1327 if (adapter == nullptr ) {
1328 if (!is_init_completed()) {
1329 // Don't throw exceptions during VM initialization because java.lang.* classes
1330 // might not have been initialized, causing problems when constructing the
1331 // Java exception object.
1332 vm_exit_during_initialization("Out of space in CodeCache for adapters");
1333 } else {
1334 THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
1335 }
1336 }
1337
1338 mh->set_adapter_entry(adapter);
1339 return adapter->get_c2i_entry();
1340 }
1341
|
100 sizes,
101 method_type,
102 CHECK_NULL);
103 int size = Method::size(access_flags.is_native());
104 return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, name);
105 }
106
// Lightweight constructor: wires this Method to its ConstMethod and resets
// all runtime state (entry points, profiling data, AOT code, vtable index)
// to the "not yet linked" state. Runs under a NoSafepointVerifier because a
// partially-initialized Method must not be observed across a safepoint.
Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
  NoSafepointVerifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  set_intrinsic_id(vmIntrinsics::_none);
  clear_method_data();
  clear_method_counters();
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(nullptr); // sets i2i entry and from_int
  set_adapter_entry(nullptr);
  Method::clear_code(); // from_c/from_i get set to c2i/i2i
  // No AOT-compiled code is attached yet.
  set_preload_code(nullptr);
  set_aot_code_entry(nullptr);

  // Native methods additionally carry a native-function pointer and a
  // signature handler; start both out empty.
  if (access_flags.is_native()) {
    clear_native_function();
    set_signature_handler(nullptr);
  }

  NOT_PRODUCT(set_compiled_invocation_count(0);)
  // Name is very useful for debugging.
  NOT_PRODUCT(_name = name;)
}
132
133 // Release Method*. The nmethod will be gone when we get here because
134 // we've walked the code cache.
// Free the metadata owned by this Method back to the loader's metaspace and
// null out the links. Also invalidates any AOT code entry so the cache
// cannot hand out code for a dead Method.
// Precondition: no nmethod still references this Method (callers have
// already walked the code cache).
void Method::deallocate_contents(ClassLoaderData* loader_data) {
  MetadataFactory::free_metadata(loader_data, constMethod());
  set_constMethod(nullptr);
  MetadataFactory::free_metadata(loader_data, method_data());
  clear_method_data();
  MetadataFactory::free_metadata(loader_data, method_counters());
  clear_method_counters();
  set_adapter_entry(nullptr);
  // The nmethod will be gone when we get here.
  if (code() != nullptr) _code = nullptr;
  if (aot_code_entry() != nullptr) {
    set_preload_code(nullptr);
    AOTCodeCache::invalidate(aot_code_entry());
    set_aot_code_entry(nullptr);
  }
}
151
152 void Method::release_C_heap_structures() {
153 if (method_data()) {
154 method_data()->release_C_heap_structures();
155
156 // Destroy MethodData embedded lock
157 method_data()->~MethodData();
158 }
159 }
160
161 address Method::get_i2c_entry() {
162 if (is_abstract()) {
163 return SharedRuntime::throw_AbstractMethodError_entry();
164 }
165 assert(adapter() != nullptr, "must have");
166 return adapter()->get_i2c_entry();
167 }
168
address Method::get_c2i_entry() {
  // Abstract methods have no adapter; callers must handle a null entry.
  if (is_abstract()) {
    return nullptr;
  }
  // NOTE(review): this body asserts fast class-init-check support and returns
  // the *no-clinit-check* c2i entry; the visible text is spliced across a
  // chunk gap, so confirm the function name matches the intended entry point.
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}
193
// Resource-allocated "<holder>.<name><signature>" string for this method.
char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}
197
// Same as above but writes into a caller-supplied buffer of 'size' bytes.
char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}
201
202 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
203 const char* klass_name = klass->external_name();
204 int klass_name_len = (int)strlen(klass_name);
205 int method_name_len = method_name->utf8_length();
206 int len = klass_name_len + 2 + method_name_len + signature->utf8_length();
207 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
208 strcpy(dest, klass_name);
209 dest[klass_name_len + 0] = ':';
210 dest[klass_name_len + 1] = ':';
211 strcpy(&dest[klass_name_len + 2], method_name->as_C_string());
212 strcpy(&dest[klass_name_len + 2 + method_name_len], signature->as_C_string());
213 dest[len] = 0;
214 return dest;
215 }
216
// Variant that writes into a caller-supplied buffer of 'size' bytes.
// Truncates rather than overflows: if the class name already fills the
// buffer, the '.', method name and signature are dropped entirely.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  // Append only if there is room for at least '.' plus the terminating NUL.
  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
392 address Method::bcp_from(address bcp) const {
393 if (is_native() && bcp == nullptr) {
394 return code_base();
395 } else {
396 return bcp;
397 }
398 }
399
400 int Method::size(bool is_native) {
401 // If native, then include pointers for native_function and signature_handler
402 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
403 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
404 return align_metadata_size(header_size() + extra_words);
405 }
406
// Name of the class that declares this method.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
410
// Visit every metaspace pointer embedded in this Method so it can be
// relocated/archived.
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  LogStreamHandle(Trace, aot) lsh;
  if (lsh.is_enabled()) {
    lsh.print("Iter(Method): %p ", this);
    print_external_name(&lsh);
    lsh.cr();
  }
  // The ConstMethod of a not-yet-rewritten holder is pushed as writable
  // because rewriting will still mutate it.
  if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
    // holder is null for MH intrinsic methods
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
429
430 #if INCLUDE_CDS
431 // Attempt to return method to original state. Clear any pointers
432 // (to objects outside the shared spaces). We won't be able to predict
433 // where they should point in a new JVM. Further initialize some
434 // entries now in order allow them to be write protected later.
435
436 void Method::remove_unshareable_info() {
437 unlink_method();
438 if (method_data() != nullptr) {
439 method_data()->remove_unshareable_info();
440 }
441 if (method_counters() != nullptr) {
442 method_counters()->remove_unshareable_info();
443 }
444 if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
445 _adapter->remove_unshareable_info();
446 _adapter = nullptr;
447 }
448 if (method_data() != nullptr) {
449 method_data()->remove_unshareable_info();
450 }
451 if (method_counters() != nullptr) {
452 method_counters()->remove_unshareable_info();
453 }
454 JFR_ONLY(REMOVE_METHOD_ID(this);)
455 }
456
457 void Method::restore_unshareable_info(TRAPS) {
458 assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
459 if (method_data() != nullptr) {
460 method_data()->restore_unshareable_info(CHECK);
461 }
462 if (method_counters() != nullptr) {
463 method_counters()->restore_unshareable_info(CHECK);
464 }
465 if (_adapter != nullptr) {
466 assert(_adapter->is_linked(), "must be");
467 _from_compiled_entry = _adapter->get_c2i_entry();
468 }
469 if (method_data() != nullptr) {
470 method_data()->restore_unshareable_info(CHECK);
471 }
472 if (method_counters() != nullptr) {
473 method_counters()->restore_unshareable_info(CHECK);
474 }
475 assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
476 assert(!pending_queue_processed(), "method's pending_queued_processed flag should not be set");
477 }
478 #endif
479
480 void Method::set_vtable_index(int index) {
481 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
482 // At runtime initialize_vtable is rerun as part of link_class_impl()
483 // for a shared class loaded by the non-boot loader to obtain the loader
484 // constraints based on the runtime classloaders' context.
485 return; // don't write into the shared class
486 } else {
487 _vtable_index = index;
488 }
489 }
490
491 void Method::set_itable_index(int index) {
492 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
493 // At runtime initialize_itable is rerun as part of link_class_impl()
494 // for a shared class loaded by the non-boot loader to obtain the loader
495 // constraints based on the runtime classloaders' context. The dumptime
496 // itable index should be the same as the runtime index.
680 // Do not profile the method if metaspace has hit an OOM previously
681 // allocating profiling data. Callers clear pending exception so don't
682 // add one here.
683 if (ClassLoaderDataGraph::has_metaspace_oom()) {
684 return;
685 }
686
687 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
688 MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
689 if (HAS_PENDING_EXCEPTION) {
690 CompileBroker::log_metaspace_failure();
691 ClassLoaderDataGraph::set_metaspace_oom(true);
692 return; // return the exception (which is cleared)
693 }
694
695 if (!AtomicAccess::replace_if_null(&method->_method_data, method_data)) {
696 MetadataFactory::free_metadata(loader_data, method_data);
697 return;
698 }
699
700 if (ForceProfiling && TrainingData::need_data()) {
701 MethodTrainingData* mtd = MethodTrainingData::make(method, false);
702 guarantee(mtd != nullptr, "");
703 }
704
705 if (PrintMethodData) {
706 ResourceMark rm(THREAD);
707 tty->print("build_profiling_method_data for ");
708 method->print_name(tty);
709 tty->cr();
710 // At the end of the run, the MDO, full of data, will be dumped.
711 }
712 }
713
// Allocate and install MethodCounters for 'm', returning whichever counters
// end up installed (possibly another thread's). Returns null on metaspace
// exhaustion; the OOM is then logged and remembered globally so future
// profiling attempts are skipped.
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      // Allocation failure is handled below via the null check; don't
      // propagate the exception.
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  // If another thread installed counters first, free ours and use theirs.
  if (!mh->init_method_counters(counters)) {
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  // When profiling is forced and training-data collection is active, make
  // sure a MethodTrainingData record exists for this method.
  if (ForceProfiling && TrainingData::need_data()) {
    MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
    guarantee(mtd != nullptr, "");
  }

  return mh->method_counters();
}
753
// Atomically install 'counters'; returns false if another thread won the race.
bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  return AtomicAccess::replace_if_null(&_method_counters, counters);
}
758
759 void Method::set_exception_handler_entered(int handler_bci) {
760 if (ProfileExceptionHandlers) {
761 MethodData* mdo = method_data();
762 if (mdo != nullptr) {
763 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
764 handler_data.set_exception_handler_entered();
765 }
766 }
767 }
768
769 int Method::extra_stack_words() {
770 // not an inline function, to avoid a header dependency on Interpreter
957 return (is_static() ||
958 method_holder()->major_version() < 51);
959 }
960
961 bool Method::is_static_initializer() const {
962 // For classfiles version 51 or greater, ensure that the clinit method is
963 // static. Non-static methods with the name "<clinit>" are not static
964 // initializers. (older classfiles exempted for backward compatibility)
965 return name() == vmSymbols::class_initializer_name() &&
966 has_valid_initializer_flags();
967 }
968
// True iff this method is a constructor ("<init>").
bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}
972
// Static methods need a class-initialization barrier until their holder is
// fully initialized.
bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}
976
977 bool Method::code_has_clinit_barriers() const {
978 nmethod* nm = code();
979 return (nm != nullptr) && nm->has_clinit_barriers();
980 }
981
// Identifies java.lang.Object.wait0 by holder and name.
bool Method::is_object_wait0() const {
  return klass_name() == vmSymbols::java_lang_Object()
         && name() == vmSymbols::wait_name();
}
986
987 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
988 int length = method->checked_exceptions_length();
989 if (length == 0) { // common case
990 return objArrayHandle(THREAD, Universe::the_empty_class_array());
991 } else {
992 methodHandle h_this(THREAD, method);
993 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
994 objArrayHandle mirrors (THREAD, m_oop);
995 for (int i = 0; i < length; i++) {
996 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
997 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
998 if (log_is_enabled(Warning, exceptions) &&
999 !k->is_subclass_of(vmClasses::Throwable_klass())) {
1000 ResourceMark rm(THREAD);
1001 log_warning(exceptions)(
1252 if (!CDSConfig::is_dumping_adapters()) {
1253 _adapter = nullptr;
1254 }
1255 _i2i_entry = nullptr;
1256 _from_compiled_entry = nullptr;
1257 _from_interpreted_entry = nullptr;
1258
1259 if (is_native()) {
1260 *native_function_addr() = nullptr;
1261 set_signature_handler(nullptr);
1262 }
1263 NOT_PRODUCT(set_compiled_invocation_count(0);)
1264
1265 clear_method_data();
1266 clear_method_counters();
1267 clear_is_not_c1_compilable();
1268 clear_is_not_c1_osr_compilable();
1269 clear_is_not_c2_compilable();
1270 clear_is_not_c2_osr_compilable();
1271 clear_queued_for_compilation();
1272 set_pending_queue_processed(false);
1273
1274 remove_unshareable_flags();
1275 }
1276
// Clear status flags that must not appear in the archived copy of this
// Method. Redefinition-related states must already be clear.
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_pending_queue_processed(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
  set_has_upcall_on_method_entry(false);
  set_has_upcall_on_method_exit(false);
}
1293 #endif
1294
1295 // Called when the method_holder is getting linked. Setup entrypoints so the method
1296 // is ready to be called from interpreter, compiler, and vtables.
1297 void Method::link_method(const methodHandle& h_method, TRAPS) {
1298 if (log_is_enabled(Info, perf, class, link)) {
1299 ClassLoader::perf_ik_link_methods_count()->inc();
1300 }
1301
1302 // If the code cache is full, we may reenter this function for the
1303 // leftover methods that weren't linked.
1304 if (adapter() != nullptr) {
1305 if (adapter()->in_aot_cache()) {
1306 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1307 } else {
1308 return;
1309 }
1310 }
1311 assert( _code == nullptr, "nothing compiled yet" );
1342 assert(adapter()->is_linked(), "Adapter must have been linked");
1343 #endif
1344 h_method->_from_compiled_entry = adapter()->get_c2i_entry();
1345 }
1346
1347 // ONLY USE the h_method now as make_adapter may have blocked
1348
1349 if (h_method->is_continuation_native_intrinsic()) {
1350 _from_interpreted_entry = nullptr;
1351 _from_compiled_entry = nullptr;
1352 _i2i_entry = nullptr;
1353 if (Continuations::enabled()) {
1354 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1355 AdapterHandlerLibrary::create_native_wrapper(h_method);
1356 if (!h_method->has_compiled_code()) {
1357 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1358 }
1359 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1360 }
1361 }
1362 if (_preload_code != nullptr && !_aot_code_entry->not_entrant()) {
1363 MutexLocker ml(NMethodState_lock, Mutex::_no_safepoint_check_flag);
1364 set_code(h_method, _preload_code);
1365 assert(((nmethod*)_preload_code)->aot_code_entry() == _aot_code_entry, "sanity");
1366 }
1367 }
1368
1369 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1370 assert(!mh->is_abstract(), "abstract methods do not have adapters");
1371 PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());
1372
1373 // Adapters for compiled code are made eagerly here. They are fairly
1374 // small (generally < 100 bytes) and quick to make (and cached and shared)
1375 // so making them eagerly shouldn't be too expensive.
1376 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1377 if (adapter == nullptr ) {
1378 if (!is_init_completed()) {
1379 // Don't throw exceptions during VM initialization because java.lang.* classes
1380 // might not have been initialized, causing problems when constructing the
1381 // Java exception object.
1382 vm_exit_during_initialization("Out of space in CodeCache for adapters");
1383 } else {
1384 THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
1385 }
1386 }
1387
1388 mh->set_adapter_entry(adapter);
1389 return adapter->get_c2i_entry();
1390 }
1391
|