100 sizes,
101 method_type,
102 CHECK_NULL);
103 int size = Method::size(access_flags.is_native());
104 return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, name);
105 }
106
// Bare-bones constructor: wires up the ConstMethod and resets every
// runtime field (profiling data, counters, entry points) to its
// "not yet linked" state. Must not safepoint while partially built.
Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
  NoSafepointVerifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  set_intrinsic_id(vmIntrinsics::_none);
  clear_method_data();
  clear_method_counters();
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(nullptr); // sets i2i entry and from_int
  set_adapter_entry(nullptr);
  Method::clear_code(); // from_c/from_i get set to c2i/i2i

  if (access_flags.is_native()) {
    // Native methods additionally carry a native-function pointer and a
    // signature handler; start both out empty.
    clear_native_function();
    set_signature_handler(nullptr);
  }

  NOT_PRODUCT(set_compiled_invocation_count(0);)
  // Name is very useful for debugging.
  NOT_PRODUCT(_name = name;)
}
130
// Release Method*. The nmethod will be gone when we get here because
// we've walked the code cache.
void Method::deallocate_contents(ClassLoaderData* loader_data) {
  // Free the metadata this Method owns and null the back-pointers so a
  // stale Method* cannot reach freed memory.
  MetadataFactory::free_metadata(loader_data, constMethod());
  set_constMethod(nullptr);
  MetadataFactory::free_metadata(loader_data, method_data());
  clear_method_data();
  MetadataFactory::free_metadata(loader_data, method_counters());
  clear_method_counters();
  set_adapter_entry(nullptr);
  // The nmethod will be gone when we get here.
  if (code() != nullptr) _code = nullptr;
}
144
145 void Method::release_C_heap_structures() {
146 if (method_data()) {
147 method_data()->release_C_heap_structures();
148
149 // Destroy MethodData embedded lock
150 method_data()->~MethodData();
151 }
152 }
153
154 address Method::get_i2c_entry() {
155 if (is_abstract()) {
156 return SharedRuntime::throw_AbstractMethodError_entry();
157 }
158 assert(adapter() != nullptr, "must have");
159 return adapter()->get_i2c_entry();
160 }
161
162 address Method::get_c2i_entry() {
173 }
174
// Compiled-to-interpreted stub that skips the class-initialization check;
// only meaningful on platforms with fast class-init checks.
address Method::get_c2i_no_clinit_check_entry() {
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}
180
// Resource-allocated "<holder>.<name><signature>" for this method.
char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}
184
// Same as above, but formats into the caller-supplied buffer.
char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}
188
189 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
190 const char* klass_name = klass->external_name();
191 int klass_name_len = (int)strlen(klass_name);
192 int method_name_len = method_name->utf8_length();
193 int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
194 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
195 strcpy(dest, klass_name);
196 dest[klass_name_len] = '.';
197 strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
198 strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
199 dest[len] = 0;
200 return dest;
201 }
202
// Writes "<holder>.<name><signature>" into the caller-supplied buffer,
// truncating each piece to the remaining space; always returns buf.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
378 address Method::bcp_from(address bcp) const {
379 if (is_native() && bcp == nullptr) {
380 return code_base();
381 } else {
382 return bcp;
383 }
384 }
385
// Size of a Method in words, with two extra pointer slots appended for
// native methods.
int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}
392
// Name of the class that declares this method.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
396
// Visit every metaspace pointer embedded in this Method, for CDS/AOT
// archiving and relocation.
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(aot)("Iter(Method): %p", this);

  if (!method_holder()->is_rewritten()) {
    // Not-yet-rewritten methods must stay writable so rewriting can
    // still happen after the archive is mapped.
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
410
411 #if INCLUDE_CDS
412 // Attempt to return method to original state. Clear any pointers
413 // (to objects outside the shared spaces). We won't be able to predict
414 // where they should point in a new JVM. Further initialize some
415 // entries now in order allow them to be write protected later.
416
// Strip runtime-only state before archiving so the Method can be dumped
// and later write-protected; see the comment block above.
void Method::remove_unshareable_info() {
  unlink_method();
  if (method_data() != nullptr) {
    method_data()->remove_unshareable_info();
  }
  if (method_counters() != nullptr) {
    method_counters()->remove_unshareable_info();
  }
  if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
    _adapter->remove_unshareable_info();
    _adapter = nullptr;
  }
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}
431
// Re-establish runtime state on a Method loaded from the archive.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  if (_adapter != nullptr) {
    assert(_adapter->is_linked(), "must be");
    // Archived adapters are already generated; just rewire the entry.
    _from_compiled_entry = _adapter->get_c2i_entry();
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
446 #endif
447
// Record this method's vtable slot; skipped for mapped, verified shared
// methods so the archived pages are never dirtied.
void Method::set_vtable_index(int index) {
  if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}
458
459 void Method::set_itable_index(int index) {
460 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
461 // At runtime initialize_itable is rerun as part of link_class_impl()
462 // for a shared class loaded by the non-boot loader to obtain the loader
463 // constraints based on the runtime classloaders' context. The dumptime
464 // itable index should be the same as the runtime index.
648 // Do not profile the method if metaspace has hit an OOM previously
649 // allocating profiling data. Callers clear pending exception so don't
650 // add one here.
651 if (ClassLoaderDataGraph::has_metaspace_oom()) {
652 return;
653 }
654
655 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
656 MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
657 if (HAS_PENDING_EXCEPTION) {
658 CompileBroker::log_metaspace_failure();
659 ClassLoaderDataGraph::set_metaspace_oom(true);
660 return; // return the exception (which is cleared)
661 }
662
663 if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
664 MetadataFactory::free_metadata(loader_data, method_data);
665 return;
666 }
667
668 if (PrintMethodData && (Verbose || WizardMode)) {
669 ResourceMark rm(THREAD);
670 tty->print("build_profiling_method_data for ");
671 method->print_name(tty);
672 tty->cr();
673 // At the end of the run, the MDO, full of data, will be dumped.
674 }
675 }
676
// Lazily allocate and install the MethodCounters for m. Returns the
// installed counters (ours or a racing thread's), or nullptr if
// metaspace is exhausted. Safe to call from non-Java threads.
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  if (!mh->init_method_counters(counters)) {
    // Another thread won the installation race; free our copy.
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  // Return whatever is installed now (ours or the race winner's).
  return mh->method_counters();
}
711
bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  // Atomic CAS against nullptr makes concurrent installs race-safe.
  return Atomic::replace_if_null(&_method_counters, counters);
}
716
717 void Method::set_exception_handler_entered(int handler_bci) {
718 if (ProfileExceptionHandlers) {
719 MethodData* mdo = method_data();
720 if (mdo != nullptr) {
721 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
722 handler_data.set_exception_handler_entered();
723 }
724 }
725 }
726
727 int Method::extra_stack_words() {
728 // not an inline function, to avoid a header dependency on Interpreter
915 return (is_static() ||
916 method_holder()->major_version() < 51);
917 }
918
bool Method::is_static_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static. Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return name() == vmSymbols::class_initializer_name() &&
         has_valid_initializer_flags();
}
926
// True for constructors, i.e. methods named "<init>".
bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}
930
// Static methods of a not-yet-initialized class need an initialization
// barrier before execution.
bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}
934
// True only for java.lang.Object.wait0.
bool Method::is_object_wait0() const {
  return klass_name() == vmSymbols::java_lang_Object()
         && name() == vmSymbols::wait_name();
}
939
940 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
941 int length = method->checked_exceptions_length();
942 if (length == 0) { // common case
943 return objArrayHandle(THREAD, Universe::the_empty_class_array());
944 } else {
945 methodHandle h_this(THREAD, method);
946 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
947 objArrayHandle mirrors (THREAD, m_oop);
948 for (int i = 0; i < length; i++) {
949 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
950 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
951 if (log_is_enabled(Warning, exceptions) &&
952 !k->is_subclass_of(vmClasses::Throwable_klass())) {
953 ResourceMark rm(THREAD);
954 log_warning(exceptions)(
1205 if (!CDSConfig::is_dumping_adapters()) {
1206 _adapter = nullptr;
1207 }
1208 _i2i_entry = nullptr;
1209 _from_compiled_entry = nullptr;
1210 _from_interpreted_entry = nullptr;
1211
1212 if (is_native()) {
1213 *native_function_addr() = nullptr;
1214 set_signature_handler(nullptr);
1215 }
1216 NOT_PRODUCT(set_compiled_invocation_count(0);)
1217
1218 clear_method_data();
1219 clear_method_counters();
1220 clear_is_not_c1_compilable();
1221 clear_is_not_c1_osr_compilable();
1222 clear_is_not_c2_compilable();
1223 clear_is_not_c2_osr_compilable();
1224 clear_queued_for_compilation();
1225
1226 remove_unshareable_flags();
1227 }
1228
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
}
1242 #endif
1243
1244 // Called when the method_holder is getting linked. Setup entrypoints so the method
1245 // is ready to be called from interpreter, compiler, and vtables.
1246 void Method::link_method(const methodHandle& h_method, TRAPS) {
1247 if (log_is_enabled(Info, perf, class, link)) {
1248 ClassLoader::perf_ik_link_methods_count()->inc();
1249 }
1250
1251 // If the code cache is full, we may reenter this function for the
1252 // leftover methods that weren't linked.
1253 if (adapter() != nullptr) {
1254 if (adapter()->in_aot_cache()) {
1255 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1256 } else {
1257 return;
1258 }
1259 }
1260 assert( _code == nullptr, "nothing compiled yet" );
1289 (void) make_adapters(h_method, CHECK);
1290 assert(adapter()->is_linked(), "Adapter must have been linked");
1291 h_method->_from_compiled_entry = adapter()->get_c2i_entry();
1292 }
1293
1294 // ONLY USE the h_method now as make_adapter may have blocked
1295
1296 if (h_method->is_continuation_native_intrinsic()) {
1297 _from_interpreted_entry = nullptr;
1298 _from_compiled_entry = nullptr;
1299 _i2i_entry = nullptr;
1300 if (Continuations::enabled()) {
1301 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1302 AdapterHandlerLibrary::create_native_wrapper(h_method);
1303 if (!h_method->has_compiled_code()) {
1304 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1305 }
1306 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1307 }
1308 }
1309 }
1310
// Fetch (or generate) the i2c/c2i adapters for mh and install them;
// returns the c2i entry. Throws OOME on code-cache exhaustion once the
// VM is fully initialized; aborts during startup.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  assert(!mh->is_abstract(), "abstract methods do not have adapters");
  PerfTraceTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  return adapter->get_c2i_entry();
}
1333
|
100 sizes,
101 method_type,
102 CHECK_NULL);
103 int size = Method::size(access_flags.is_native());
104 return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, name);
105 }
106
// Bare-bones constructor: wires up the ConstMethod and resets every
// runtime field (profiling data, counters, entry points, AOT code) to
// its "not yet linked" state. Must not safepoint while partially built.
Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
  NoSafepointVerifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  set_intrinsic_id(vmIntrinsics::_none);
  clear_method_data();
  clear_method_counters();
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(nullptr); // sets i2i entry and from_int
  set_adapter_entry(nullptr);
  Method::clear_code(); // from_c/from_i get set to c2i/i2i
  // No AOT-cached code is associated with a freshly built Method.
  set_preload_code(nullptr);
  set_aot_code_entry(nullptr);

  if (access_flags.is_native()) {
    // Native methods additionally carry a native-function pointer and a
    // signature handler; start both out empty.
    clear_native_function();
    set_signature_handler(nullptr);
  }

  NOT_PRODUCT(set_compiled_invocation_count(0);)
  // Name is very useful for debugging.
  NOT_PRODUCT(_name = name;)
}
132
133 // Release Method*. The nmethod will be gone when we get here because
134 // we've walked the code cache.
// Release the metadata owned by this Method and invalidate its AOT code
// entry; nmethods are already gone (the code cache has been walked).
void Method::deallocate_contents(ClassLoaderData* loader_data) {
  MetadataFactory::free_metadata(loader_data, constMethod());
  set_constMethod(nullptr);
  MetadataFactory::free_metadata(loader_data, method_data());
  clear_method_data();
  MetadataFactory::free_metadata(loader_data, method_counters());
  clear_method_counters();
  set_adapter_entry(nullptr);
  // The nmethod will be gone when we get here.
  if (code() != nullptr) _code = nullptr;
  if (aot_code_entry() != nullptr) {
    // Make sure the AOT cache never hands out code for a dead method.
    set_preload_code(nullptr);
    AOTCodeCache::invalidate(aot_code_entry());
    set_aot_code_entry(nullptr);
  }
}
151
// Free the C-heap resources owned by this method's MethodData, if any.
void Method::release_C_heap_structures() {
  if (method_data()) {
    method_data()->release_C_heap_structures();

    // Destroy MethodData embedded lock
    method_data()->~MethodData();
  }
}
160
// Interpreted-to-compiled transition stub; abstract methods get the
// AbstractMethodError-throwing stub instead.
address Method::get_i2c_entry() {
  if (is_abstract()) {
    return SharedRuntime::throw_AbstractMethodError_entry();
  }
  assert(adapter() != nullptr, "must have");
  return adapter()->get_i2c_entry();
}
168
169 address Method::get_c2i_entry() {
180 }
181
// Compiled-to-interpreted stub that skips the class-initialization check;
// only meaningful on platforms with fast class-init checks.
address Method::get_c2i_no_clinit_check_entry() {
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}
187
// Resource-allocated "<holder>.<name><signature>" for this method.
char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}
191
// Same as above, but formats into the caller-supplied buffer.
char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}
195
196 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
197 const char* klass_name = klass->external_name();
198 int klass_name_len = (int)strlen(klass_name);
199 int method_name_len = method_name->utf8_length();
200 int len = klass_name_len + 2 + method_name_len + signature->utf8_length();
201 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
202 strcpy(dest, klass_name);
203 dest[klass_name_len + 0] = ':';
204 dest[klass_name_len + 1] = ':';
205 strcpy(&dest[klass_name_len + 2], method_name->as_C_string());
206 strcpy(&dest[klass_name_len + 2 + method_name_len], signature->as_C_string());
207 dest[len] = 0;
208 return dest;
209 }
210
// Writes "<holder>.<name><signature>" into the caller-supplied buffer,
// truncating each piece to the remaining space; always returns buf.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
386 address Method::bcp_from(address bcp) const {
387 if (is_native() && bcp == nullptr) {
388 return code_base();
389 } else {
390 return bcp;
391 }
392 }
393
// Size of a Method in words, with two extra pointer slots appended for
// native methods.
int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}
400
// Name of the class that declares this method.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
404
// Visit every metaspace pointer embedded in this Method, for CDS/AOT
// archiving and relocation.
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  LogStreamHandle(Trace, aot) lsh;
  if (lsh.is_enabled()) {
    lsh.print("Iter(Method): %p ", this);
    print_external_name(&lsh);
    lsh.cr();
  }
  if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
    // holder is null for MH intrinsic methods
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
423
424 #if INCLUDE_CDS
425 // Attempt to return method to original state. Clear any pointers
426 // (to objects outside the shared spaces). We won't be able to predict
427 // where they should point in a new JVM. Further initialize some
428 // entries now in order allow them to be write protected later.
429
430 void Method::remove_unshareable_info() {
431 unlink_method();
432 if (method_data() != nullptr) {
433 method_data()->remove_unshareable_info();
434 }
435 if (method_counters() != nullptr) {
436 method_counters()->remove_unshareable_info();
437 }
438 if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
439 _adapter->remove_unshareable_info();
440 _adapter = nullptr;
441 }
442 if (method_data() != nullptr) {
443 method_data()->remove_unshareable_info();
444 }
445 if (method_counters() != nullptr) {
446 method_counters()->remove_unshareable_info();
447 }
448 JFR_ONLY(REMOVE_METHOD_ID(this);)
449 }
450
451 void Method::restore_unshareable_info(TRAPS) {
452 assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
453 if (method_data() != nullptr) {
454 method_data()->restore_unshareable_info(CHECK);
455 }
456 if (method_counters() != nullptr) {
457 method_counters()->restore_unshareable_info(CHECK);
458 }
459 if (_adapter != nullptr) {
460 assert(_adapter->is_linked(), "must be");
461 _from_compiled_entry = _adapter->get_c2i_entry();
462 }
463 if (method_data() != nullptr) {
464 method_data()->restore_unshareable_info(CHECK);
465 }
466 if (method_counters() != nullptr) {
467 method_counters()->restore_unshareable_info(CHECK);
468 }
469 assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
470 assert(!pending_queue_processed(), "method's pending_queued_processed flag should not be set");
471 }
472 #endif
473
// Record this method's vtable slot; skipped for mapped, verified shared
// methods so the archived pages are never dirtied.
void Method::set_vtable_index(int index) {
  if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}
484
485 void Method::set_itable_index(int index) {
486 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
487 // At runtime initialize_itable is rerun as part of link_class_impl()
488 // for a shared class loaded by the non-boot loader to obtain the loader
489 // constraints based on the runtime classloaders' context. The dumptime
490 // itable index should be the same as the runtime index.
674 // Do not profile the method if metaspace has hit an OOM previously
675 // allocating profiling data. Callers clear pending exception so don't
676 // add one here.
677 if (ClassLoaderDataGraph::has_metaspace_oom()) {
678 return;
679 }
680
681 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
682 MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
683 if (HAS_PENDING_EXCEPTION) {
684 CompileBroker::log_metaspace_failure();
685 ClassLoaderDataGraph::set_metaspace_oom(true);
686 return; // return the exception (which is cleared)
687 }
688
689 if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
690 MetadataFactory::free_metadata(loader_data, method_data);
691 return;
692 }
693
694 if (ForceProfiling && TrainingData::need_data()) {
695 MethodTrainingData* mtd = MethodTrainingData::make(method, false);
696 guarantee(mtd != nullptr, "");
697 }
698
699 if (PrintMethodData) {
700 ResourceMark rm(THREAD);
701 tty->print("build_profiling_method_data for ");
702 method->print_name(tty);
703 tty->cr();
704 // At the end of the run, the MDO, full of data, will be dumped.
705 }
706 }
707
// Lazily allocate and install the MethodCounters for m. Returns the
// installed counters (ours or a racing thread's), or nullptr if
// metaspace is exhausted. Safe to call from non-Java threads.
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  if (!mh->init_method_counters(counters)) {
    // Another thread won the installation race; free our copy.
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  if (ForceProfiling && TrainingData::need_data()) {
    // Eagerly create training data when profile forcing is on.
    MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
    guarantee(mtd != nullptr, "");
  }

  // Return whatever is installed now (ours or the race winner's).
  return mh->method_counters();
}
747
bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  // Atomic CAS against nullptr makes concurrent installs race-safe.
  return Atomic::replace_if_null(&_method_counters, counters);
}
752
753 void Method::set_exception_handler_entered(int handler_bci) {
754 if (ProfileExceptionHandlers) {
755 MethodData* mdo = method_data();
756 if (mdo != nullptr) {
757 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
758 handler_data.set_exception_handler_entered();
759 }
760 }
761 }
762
763 int Method::extra_stack_words() {
764 // not an inline function, to avoid a header dependency on Interpreter
951 return (is_static() ||
952 method_holder()->major_version() < 51);
953 }
954
bool Method::is_static_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static. Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return name() == vmSymbols::class_initializer_name() &&
         has_valid_initializer_flags();
}
962
// True for constructors, i.e. methods named "<init>".
bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}
966
// Static methods of a not-yet-initialized class need an initialization
// barrier before execution.
bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}
970
971 bool Method::code_has_clinit_barriers() const {
972 nmethod* nm = code();
973 return (nm != nullptr) && nm->has_clinit_barriers();
974 }
975
// True only for java.lang.Object.wait0.
bool Method::is_object_wait0() const {
  return klass_name() == vmSymbols::java_lang_Object()
         && name() == vmSymbols::wait_name();
}
980
981 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
982 int length = method->checked_exceptions_length();
983 if (length == 0) { // common case
984 return objArrayHandle(THREAD, Universe::the_empty_class_array());
985 } else {
986 methodHandle h_this(THREAD, method);
987 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
988 objArrayHandle mirrors (THREAD, m_oop);
989 for (int i = 0; i < length; i++) {
990 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
991 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
992 if (log_is_enabled(Warning, exceptions) &&
993 !k->is_subclass_of(vmClasses::Throwable_klass())) {
994 ResourceMark rm(THREAD);
995 log_warning(exceptions)(
1246 if (!CDSConfig::is_dumping_adapters()) {
1247 _adapter = nullptr;
1248 }
1249 _i2i_entry = nullptr;
1250 _from_compiled_entry = nullptr;
1251 _from_interpreted_entry = nullptr;
1252
1253 if (is_native()) {
1254 *native_function_addr() = nullptr;
1255 set_signature_handler(nullptr);
1256 }
1257 NOT_PRODUCT(set_compiled_invocation_count(0);)
1258
1259 clear_method_data();
1260 clear_method_counters();
1261 clear_is_not_c1_compilable();
1262 clear_is_not_c1_osr_compilable();
1263 clear_is_not_c2_compilable();
1264 clear_is_not_c2_osr_compilable();
1265 clear_queued_for_compilation();
1266 set_pending_queue_processed(false);
1267
1268 remove_unshareable_flags();
1269 }
1270
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_pending_queue_processed(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
  set_has_upcall_on_method_entry(false);
  set_has_upcall_on_method_exit(false);
}
1287 #endif
1288
1289 // Called when the method_holder is getting linked. Setup entrypoints so the method
1290 // is ready to be called from interpreter, compiler, and vtables.
1291 void Method::link_method(const methodHandle& h_method, TRAPS) {
1292 if (log_is_enabled(Info, perf, class, link)) {
1293 ClassLoader::perf_ik_link_methods_count()->inc();
1294 }
1295
1296 // If the code cache is full, we may reenter this function for the
1297 // leftover methods that weren't linked.
1298 if (adapter() != nullptr) {
1299 if (adapter()->in_aot_cache()) {
1300 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1301 } else {
1302 return;
1303 }
1304 }
1305 assert( _code == nullptr, "nothing compiled yet" );
1334 (void) make_adapters(h_method, CHECK);
1335 assert(adapter()->is_linked(), "Adapter must have been linked");
1336 h_method->_from_compiled_entry = adapter()->get_c2i_entry();
1337 }
1338
1339 // ONLY USE the h_method now as make_adapter may have blocked
1340
1341 if (h_method->is_continuation_native_intrinsic()) {
1342 _from_interpreted_entry = nullptr;
1343 _from_compiled_entry = nullptr;
1344 _i2i_entry = nullptr;
1345 if (Continuations::enabled()) {
1346 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1347 AdapterHandlerLibrary::create_native_wrapper(h_method);
1348 if (!h_method->has_compiled_code()) {
1349 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1350 }
1351 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1352 }
1353 }
1354 if (_preload_code != nullptr && !_aot_code_entry->not_entrant()) {
1355 MutexLocker ml(NMethodState_lock, Mutex::_no_safepoint_check_flag);
1356 set_code(h_method, _preload_code);
1357 assert(((nmethod*)_preload_code)->aot_code_entry() == _aot_code_entry, "sanity");
1358 }
1359 }
1360
// Fetch (or generate) the i2c/c2i adapters for mh and install them;
// returns the c2i entry. Throws OOME on code-cache exhaustion once the
// VM is fully initialized; aborts during startup.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  assert(!mh->is_abstract(), "abstract methods do not have adapters");
  PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  return adapter->get_c2i_entry();
}
1383
|