99 sizes,
100 method_type,
101 CHECK_NULL);
102 int size = Method::size(access_flags.is_native());
103 return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, name);
104 }
105
// Minimal initialization of a freshly allocated Method. Runs under a
// NoSafepointVerifier: the object is not fully formed yet and must not be
// observed across a safepoint while its fields are being set.
Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
  NoSafepointVerifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  set_intrinsic_id(vmIntrinsics::_none);
  clear_method_data();
  clear_method_counters();
  // Not placed in any vtable yet; use the recognizable garbage sentinel.
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(nullptr); // sets i2i entry and from_int
  set_adapter_entry(nullptr);
  Method::clear_code(); // from_c/from_i get set to c2i/i2i

  if (access_flags.is_native()) {
    clear_native_function();
    set_signature_handler(nullptr);
  }

  NOT_PRODUCT(set_compiled_invocation_count(0);)
  // Name is very useful for debugging.
  NOT_PRODUCT(_name = name;)
}
129
// Release Method*. The nmethod will be gone when we get here because
// we've walked the code cache.
void Method::deallocate_contents(ClassLoaderData* loader_data) {
  // Free owned metadata and null each pointer so a stale Method* cannot
  // reach freed memory.
  MetadataFactory::free_metadata(loader_data, constMethod());
  set_constMethod(nullptr);
  MetadataFactory::free_metadata(loader_data, method_data());
  clear_method_data();
  MetadataFactory::free_metadata(loader_data, method_counters());
  clear_method_counters();
  // Adapters are cached and shared (see make_adapters); just unlink ours.
  set_adapter_entry(nullptr);
  // The nmethod will be gone when we get here.
  if (code() != nullptr) _code = nullptr;
}
143
144 void Method::release_C_heap_structures() {
145 if (method_data()) {
146 method_data()->release_C_heap_structures();
147
148 // Destroy MethodData embedded lock
149 method_data()->~MethodData();
150 }
151 }
152
153 address Method::get_i2c_entry() {
154 assert(adapter() != nullptr, "must have");
155 return adapter()->get_i2c_entry();
156 }
157
158 address Method::get_c2i_entry() {
159 assert(adapter() != nullptr, "must have");
160 return adapter()->get_c2i_entry();
161 }
166 }
167
168 address Method::get_c2i_no_clinit_check_entry() {
169 assert(VM_Version::supports_fast_class_init_checks(), "");
170 assert(adapter() != nullptr, "must have");
171 return adapter()->get_c2i_no_clinit_check_entry();
172 }
173
174 char* Method::name_and_sig_as_C_string() const {
175 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
176 }
177
178 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
179 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
180 }
181
// Builds "<klass>.<name><signature>" in a freshly allocated resource-area
// buffer; lifetime is bounded by the current ResourceMark.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
  const char* klass_name = klass->external_name();
  int klass_name_len = (int)strlen(klass_name);
  int method_name_len = method_name->utf8_length();
  // +1 for the '.' separator; the trailing NUL is covered by len + 1 below.
  int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
  char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
  strcpy(dest, klass_name);
  dest[klass_name_len] = '.';
  strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
  strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
  dest[len] = 0;
  return dest;
}
195
// Formats "<klass>.<name><signature>" into the caller-supplied buffer,
// truncating to fit 'size' bytes. Returns 'buf'.
// NOTE(review): relies on as_klass_external_name/as_C_string to
// NUL-terminate within the given capacity — confirm against Symbol API.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
371 address Method::bcp_from(address bcp) const {
372 if (is_native() && bcp == nullptr) {
373 return code_base();
374 } else {
375 return bcp;
376 }
377 }
378
379 int Method::size(bool is_native) {
380 // If native, then include pointers for native_function and signature_handler
381 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
382 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
383 return align_metadata_size(header_size() + extra_words);
384 }
385
// Name Symbol of this method's holder class.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
389
// Visit every metaspace pointer embedded in this Method so it can be
// relocated/archived (AOT/CDS dumping).
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(aot)("Iter(Method): %p", this);

  if (!method_holder()->is_rewritten()) {
    // Before the holder is rewritten the ConstMethod may still change, so
    // archive it in a writable region.
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
403
404 #if INCLUDE_CDS
405 // Attempt to return method to original state. Clear any pointers
406 // (to objects outside the shared spaces). We won't be able to predict
407 // where they should point in a new JVM. Further initialize some
408 // entries now in order to allow them to be write protected later.
409
// Strip runtime-specific state before archiving this Method (CDS dump).
void Method::remove_unshareable_info() {
  unlink_method();
  if (method_data() != nullptr) {
    method_data()->remove_unshareable_info();
  }
  if (method_counters() != nullptr) {
    method_counters()->remove_unshareable_info();
  }
  // When adapters are being dumped, strip the adapter's runtime state and
  // detach it; otherwise _adapter is handled by unlink_method.
  if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
    _adapter->remove_unshareable_info();
    _adapter = nullptr;
  }
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}
424
// Re-establish runtime state on a Method loaded from the CDS archive.
// May throw (TRAPS) via the nested restore calls.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  if (_adapter != nullptr) {
    // An archived adapter must already be linked; reconnect the
    // compiled-call entry to its c2i stub.
    assert(_adapter->is_linked(), "must be");
    _from_compiled_entry = _adapter->get_c2i_entry();
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
439 #endif
440
441 void Method::set_vtable_index(int index) {
442 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
443 // At runtime initialize_vtable is rerun as part of link_class_impl()
444 // for a shared class loaded by the non-boot loader to obtain the loader
445 // constraints based on the runtime classloaders' context.
446 return; // don't write into the shared class
447 } else {
448 _vtable_index = index;
449 }
450 }
451
452 void Method::set_itable_index(int index) {
453 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
454 // At runtime initialize_itable is rerun as part of link_class_impl()
455 // for a shared class loaded by the non-boot loader to obtain the loader
456 // constraints based on the runtime classloaders' context. The dumptime
457 // itable index should be the same as the runtime index.
641 // Do not profile the method if metaspace has hit an OOM previously
642 // allocating profiling data. Callers clear pending exception so don't
643 // add one here.
644 if (ClassLoaderDataGraph::has_metaspace_oom()) {
645 return;
646 }
647
648 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
649 MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
650 if (HAS_PENDING_EXCEPTION) {
651 CompileBroker::log_metaspace_failure();
652 ClassLoaderDataGraph::set_metaspace_oom(true);
653 return; // return the exception (which is cleared)
654 }
655
656 if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
657 MetadataFactory::free_metadata(loader_data, method_data);
658 return;
659 }
660
661 if (PrintMethodData && (Verbose || WizardMode)) {
662 ResourceMark rm(THREAD);
663 tty->print("build_profiling_method_data for ");
664 method->print_name(tty);
665 tty->cr();
666 // At the end of the run, the MDO, full of data, will be dumped.
667 }
668 }
669
// Lazily allocate MethodCounters for 'm'. Callable from Java and non-Java
// threads; returns the installed counters (possibly created by a racing
// thread) or nullptr when metaspace is exhausted.
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      // Failure is reported via the nullptr check below; don't propagate.
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  if (!mh->init_method_counters(counters)) {
    // Another thread won the installation race; free our copy.
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  // Return whichever counters ended up installed.
  return mh->method_counters();
}
704
705 bool Method::init_method_counters(MethodCounters* counters) {
706 // Try to install a pointer to MethodCounters, return true on success.
707 return Atomic::replace_if_null(&_method_counters, counters);
708 }
709
710 void Method::set_exception_handler_entered(int handler_bci) {
711 if (ProfileExceptionHandlers) {
712 MethodData* mdo = method_data();
713 if (mdo != nullptr) {
714 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
715 handler_data.set_exception_handler_entered();
716 }
717 }
718 }
719
720 int Method::extra_stack_words() {
721 // not an inline function, to avoid a header dependency on Interpreter
908 return (is_static() ||
909 method_holder()->major_version() < 51);
910 }
911
912 bool Method::is_static_initializer() const {
913 // For classfiles version 51 or greater, ensure that the clinit method is
914 // static. Non-static methods with the name "<clinit>" are not static
915 // initializers. (older classfiles exempted for backward compatibility)
916 return name() == vmSymbols::class_initializer_name() &&
917 has_valid_initializer_flags();
918 }
919
920 bool Method::is_object_initializer() const {
921 return name() == vmSymbols::object_initializer_name();
922 }
923
924 bool Method::needs_clinit_barrier() const {
925 return is_static() && !method_holder()->is_initialized();
926 }
927
928 bool Method::is_object_wait0() const {
929 return klass_name() == vmSymbols::java_lang_Object()
930 && name() == vmSymbols::wait_name();
931 }
932
933 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
934 int length = method->checked_exceptions_length();
935 if (length == 0) { // common case
936 return objArrayHandle(THREAD, Universe::the_empty_class_array());
937 } else {
938 methodHandle h_this(THREAD, method);
939 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
940 objArrayHandle mirrors (THREAD, m_oop);
941 for (int i = 0; i < length; i++) {
942 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
943 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
944 if (log_is_enabled(Warning, exceptions) &&
945 !k->is_subclass_of(vmClasses::Throwable_klass())) {
946 ResourceMark rm(THREAD);
947 log_warning(exceptions)(
1198 if (!CDSConfig::is_dumping_adapters() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) {
1199 _adapter = nullptr;
1200 }
1201 _i2i_entry = nullptr;
1202 _from_compiled_entry = nullptr;
1203 _from_interpreted_entry = nullptr;
1204
1205 if (is_native()) {
1206 *native_function_addr() = nullptr;
1207 set_signature_handler(nullptr);
1208 }
1209 NOT_PRODUCT(set_compiled_invocation_count(0);)
1210
1211 clear_method_data();
1212 clear_method_counters();
1213 clear_is_not_c1_compilable();
1214 clear_is_not_c1_osr_compilable();
1215 clear_is_not_c2_compilable();
1216 clear_is_not_c2_osr_compilable();
1217 clear_queued_for_compilation();
1218
1219 remove_unshareable_flags();
1220 }
1221
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  // NOTE(review): is_not_c1_osr_compilable is not cleared here although
  // unlink_method clears it — confirm this asymmetry is intentional.
  set_on_stack_flag(false);
}
1235 #endif
1236
1237 // Called when the method_holder is getting linked. Setup entrypoints so the method
1238 // is ready to be called from interpreter, compiler, and vtables.
1239 void Method::link_method(const methodHandle& h_method, TRAPS) {
1240 if (log_is_enabled(Info, perf, class, link)) {
1241 ClassLoader::perf_ik_link_methods_count()->inc();
1242 }
1243
1244 // If the code cache is full, we may reenter this function for the
1245 // leftover methods that weren't linked.
1246 if (adapter() != nullptr) {
1247 if (adapter()->is_shared()) {
1248 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1249 } else {
1250 return;
1251 }
1252 }
1253 assert( _code == nullptr, "nothing compiled yet" );
1279 if (_adapter == nullptr) {
1280 (void) make_adapters(h_method, CHECK);
1281 assert(adapter()->is_linked(), "Adapter must have been linked");
1282 }
1283
1284 // ONLY USE the h_method now as make_adapter may have blocked
1285
1286 if (h_method->is_continuation_native_intrinsic()) {
1287 _from_interpreted_entry = nullptr;
1288 _from_compiled_entry = nullptr;
1289 _i2i_entry = nullptr;
1290 if (Continuations::enabled()) {
1291 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1292 AdapterHandlerLibrary::create_native_wrapper(h_method);
1293 if (!h_method->has_compiled_code()) {
1294 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1295 }
1296 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1297 }
1298 }
1299 }
1300
// Create (or fetch the cached/shared) i2c/c2i adapter for 'mh' and install
// it. Returns the c2i entry. Throws OutOfMemoryError — or exits the VM if
// still initializing — when the code cache has no room for adapters.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  PerfTraceTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  // Compiled callers go through the c2i adapter until compiled code exists.
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  return adapter->get_c2i_entry();
}
|
99 sizes,
100 method_type,
101 CHECK_NULL);
102 int size = Method::size(access_flags.is_native());
103 return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, name);
104 }
105
// Minimal initialization of a freshly allocated Method. Runs under a
// NoSafepointVerifier: the object is not fully formed yet and must not be
// observed across a safepoint while its fields are being set.
Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
  NoSafepointVerifier no_safepoint;
  set_constMethod(xconst);
  set_access_flags(access_flags);
  set_intrinsic_id(vmIntrinsics::_none);
  clear_method_data();
  clear_method_counters();
  // Not placed in any vtable yet; use the recognizable garbage sentinel.
  set_vtable_index(Method::garbage_vtable_index);

  // Fix and bury in Method*
  set_interpreter_entry(nullptr); // sets i2i entry and from_int
  set_adapter_entry(nullptr);
  Method::clear_code(); // from_c/from_i get set to c2i/i2i
  // No AOT-cached code is associated with a new Method.
  set_preload_code(nullptr);
  set_aot_code_entry(nullptr);

  if (access_flags.is_native()) {
    clear_native_function();
    set_signature_handler(nullptr);
  }

  NOT_PRODUCT(set_compiled_invocation_count(0);)
  // Name is very useful for debugging.
  NOT_PRODUCT(_name = name;)
}
131
// Release Method*. The nmethod will be gone when we get here because
// we've walked the code cache.
void Method::deallocate_contents(ClassLoaderData* loader_data) {
  // Free owned metadata and null each pointer so a stale Method* cannot
  // reach freed memory.
  MetadataFactory::free_metadata(loader_data, constMethod());
  set_constMethod(nullptr);
  MetadataFactory::free_metadata(loader_data, method_data());
  clear_method_data();
  MetadataFactory::free_metadata(loader_data, method_counters());
  clear_method_counters();
  // Adapters are cached and shared (see make_adapters); just unlink ours.
  set_adapter_entry(nullptr);
  // The nmethod will be gone when we get here.
  if (code() != nullptr) _code = nullptr;
  if (aot_code_entry() != nullptr) {
    // Invalidate the AOT code cache entry so it cannot be reused for a
    // deallocated Method, then drop both AOT pointers.
    set_preload_code(nullptr);
    AOTCodeCache::invalidate(aot_code_entry());
    set_aot_code_entry(nullptr);
  }
}
150
151 void Method::release_C_heap_structures() {
152 if (method_data()) {
153 method_data()->release_C_heap_structures();
154
155 // Destroy MethodData embedded lock
156 method_data()->~MethodData();
157 }
158 }
159
160 address Method::get_i2c_entry() {
161 assert(adapter() != nullptr, "must have");
162 return adapter()->get_i2c_entry();
163 }
164
165 address Method::get_c2i_entry() {
166 assert(adapter() != nullptr, "must have");
167 return adapter()->get_c2i_entry();
168 }
173 }
174
175 address Method::get_c2i_no_clinit_check_entry() {
176 assert(VM_Version::supports_fast_class_init_checks(), "");
177 assert(adapter() != nullptr, "must have");
178 return adapter()->get_c2i_no_clinit_check_entry();
179 }
180
181 char* Method::name_and_sig_as_C_string() const {
182 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
183 }
184
185 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
186 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
187 }
188
189 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
190 const char* klass_name = klass->external_name();
191 int klass_name_len = (int)strlen(klass_name);
192 int method_name_len = method_name->utf8_length();
193 int len = klass_name_len + 2 + method_name_len + signature->utf8_length();
194 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
195 strcpy(dest, klass_name);
196 dest[klass_name_len + 0] = ':';
197 dest[klass_name_len + 1] = ':';
198 strcpy(&dest[klass_name_len + 2], method_name->as_C_string());
199 strcpy(&dest[klass_name_len + 2 + method_name_len], signature->as_C_string());
200 dest[len] = 0;
201 return dest;
202 }
203
// Formats "<klass>.<name><signature>" into the caller-supplied buffer,
// truncating to fit 'size' bytes. Returns 'buf'.
// NOTE(review): relies on as_klass_external_name/as_C_string to
// NUL-terminate within the given capacity — confirm against Symbol API.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
379 address Method::bcp_from(address bcp) const {
380 if (is_native() && bcp == nullptr) {
381 return code_base();
382 } else {
383 return bcp;
384 }
385 }
386
387 int Method::size(bool is_native) {
388 // If native, then include pointers for native_function and signature_handler
389 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
390 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
391 return align_metadata_size(header_size() + extra_words);
392 }
393
// Name Symbol of this method's holder class.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
397
// Visit every metaspace pointer embedded in this Method so it can be
// relocated/archived (AOT/CDS dumping).
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  // Trace logging includes the external name; build it only when enabled.
  LogStreamHandle(Trace, aot) lsh;
  if (lsh.is_enabled()) {
    lsh.print("Iter(Method): %p ", this);
    print_external_name(&lsh);
    lsh.cr();
  }
  if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
    // holder is null for MH intrinsic methods
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
416
417 #if INCLUDE_CDS
418 // Attempt to return method to original state. Clear any pointers
419 // (to objects outside the shared spaces). We won't be able to predict
420 // where they should point in a new JVM. Further initialize some
421 // entries now in order to allow them to be write protected later.
422
423 void Method::remove_unshareable_info() {
424 unlink_method();
425 if (method_data() != nullptr) {
426 method_data()->remove_unshareable_info();
427 }
428 if (method_counters() != nullptr) {
429 method_counters()->remove_unshareable_info();
430 }
431 if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
432 _adapter->remove_unshareable_info();
433 _adapter = nullptr;
434 }
435 if (method_data() != nullptr) {
436 method_data()->remove_unshareable_info();
437 }
438 if (method_counters() != nullptr) {
439 method_counters()->remove_unshareable_info();
440 }
441 JFR_ONLY(REMOVE_METHOD_ID(this);)
442 }
443
444 void Method::restore_unshareable_info(TRAPS) {
445 assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
446 if (method_data() != nullptr) {
447 method_data()->restore_unshareable_info(CHECK);
448 }
449 if (method_counters() != nullptr) {
450 method_counters()->restore_unshareable_info(CHECK);
451 }
452 if (_adapter != nullptr) {
453 assert(_adapter->is_linked(), "must be");
454 _from_compiled_entry = _adapter->get_c2i_entry();
455 }
456 if (method_data() != nullptr) {
457 method_data()->restore_unshareable_info(CHECK);
458 }
459 if (method_counters() != nullptr) {
460 method_counters()->restore_unshareable_info(CHECK);
461 }
462 assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
463 assert(!pending_queue_processed(), "method's pending_queued_processed flag should not be set");
464 }
465 #endif
466
467 void Method::set_vtable_index(int index) {
468 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
469 // At runtime initialize_vtable is rerun as part of link_class_impl()
470 // for a shared class loaded by the non-boot loader to obtain the loader
471 // constraints based on the runtime classloaders' context.
472 return; // don't write into the shared class
473 } else {
474 _vtable_index = index;
475 }
476 }
477
478 void Method::set_itable_index(int index) {
479 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
480 // At runtime initialize_itable is rerun as part of link_class_impl()
481 // for a shared class loaded by the non-boot loader to obtain the loader
482 // constraints based on the runtime classloaders' context. The dumptime
483 // itable index should be the same as the runtime index.
667 // Do not profile the method if metaspace has hit an OOM previously
668 // allocating profiling data. Callers clear pending exception so don't
669 // add one here.
670 if (ClassLoaderDataGraph::has_metaspace_oom()) {
671 return;
672 }
673
674 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
675 MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
676 if (HAS_PENDING_EXCEPTION) {
677 CompileBroker::log_metaspace_failure();
678 ClassLoaderDataGraph::set_metaspace_oom(true);
679 return; // return the exception (which is cleared)
680 }
681
682 if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
683 MetadataFactory::free_metadata(loader_data, method_data);
684 return;
685 }
686
687 if (ForceProfiling && TrainingData::need_data()) {
688 MethodTrainingData* mtd = MethodTrainingData::make(method, false);
689 guarantee(mtd != nullptr, "");
690 }
691
692 if (PrintMethodData) {
693 ResourceMark rm(THREAD);
694 tty->print("build_profiling_method_data for ");
695 method->print_name(tty);
696 tty->cr();
697 // At the end of the run, the MDO, full of data, will be dumped.
698 }
699 }
700
// Lazily allocate MethodCounters for 'm'. Callable from Java and non-Java
// threads; returns the installed counters (possibly created by a racing
// thread) or nullptr when metaspace is exhausted.
MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      // Failure is reported via the nullptr check below; don't propagate.
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call metaspace allocation that doesn't throw exception if the
    // current thread isn't a JavaThread, ie. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  if (!mh->init_method_counters(counters)) {
    // Another thread won the installation race; free our copy.
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  if (ForceProfiling && TrainingData::need_data()) {
    // Eagerly create training data — presumably for AOT training runs;
    // confirm against TrainingData::need_data semantics.
    MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
    guarantee(mtd != nullptr, "");
  }

  // Return whichever counters ended up installed.
  return mh->method_counters();
}
740
741 bool Method::init_method_counters(MethodCounters* counters) {
742 // Try to install a pointer to MethodCounters, return true on success.
743 return Atomic::replace_if_null(&_method_counters, counters);
744 }
745
746 void Method::set_exception_handler_entered(int handler_bci) {
747 if (ProfileExceptionHandlers) {
748 MethodData* mdo = method_data();
749 if (mdo != nullptr) {
750 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
751 handler_data.set_exception_handler_entered();
752 }
753 }
754 }
755
756 int Method::extra_stack_words() {
757 // not an inline function, to avoid a header dependency on Interpreter
944 return (is_static() ||
945 method_holder()->major_version() < 51);
946 }
947
948 bool Method::is_static_initializer() const {
949 // For classfiles version 51 or greater, ensure that the clinit method is
950 // static. Non-static methods with the name "<clinit>" are not static
951 // initializers. (older classfiles exempted for backward compatibility)
952 return name() == vmSymbols::class_initializer_name() &&
953 has_valid_initializer_flags();
954 }
955
956 bool Method::is_object_initializer() const {
957 return name() == vmSymbols::object_initializer_name();
958 }
959
960 bool Method::needs_clinit_barrier() const {
961 return is_static() && !method_holder()->is_initialized();
962 }
963
964 bool Method::code_has_clinit_barriers() const {
965 nmethod* nm = code();
966 return (nm != nullptr) && nm->has_clinit_barriers();
967 }
968
969 bool Method::is_object_wait0() const {
970 return klass_name() == vmSymbols::java_lang_Object()
971 && name() == vmSymbols::wait_name();
972 }
973
974 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
975 int length = method->checked_exceptions_length();
976 if (length == 0) { // common case
977 return objArrayHandle(THREAD, Universe::the_empty_class_array());
978 } else {
979 methodHandle h_this(THREAD, method);
980 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
981 objArrayHandle mirrors (THREAD, m_oop);
982 for (int i = 0; i < length; i++) {
983 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
984 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
985 if (log_is_enabled(Warning, exceptions) &&
986 !k->is_subclass_of(vmClasses::Throwable_klass())) {
987 ResourceMark rm(THREAD);
988 log_warning(exceptions)(
1239 if (!CDSConfig::is_dumping_adapters() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) {
1240 _adapter = nullptr;
1241 }
1242 _i2i_entry = nullptr;
1243 _from_compiled_entry = nullptr;
1244 _from_interpreted_entry = nullptr;
1245
1246 if (is_native()) {
1247 *native_function_addr() = nullptr;
1248 set_signature_handler(nullptr);
1249 }
1250 NOT_PRODUCT(set_compiled_invocation_count(0);)
1251
1252 clear_method_data();
1253 clear_method_counters();
1254 clear_is_not_c1_compilable();
1255 clear_is_not_c1_osr_compilable();
1256 clear_is_not_c2_compilable();
1257 clear_is_not_c2_osr_compilable();
1258 clear_queued_for_compilation();
1259 set_pending_queue_processed(false);
1260
1261 remove_unshareable_flags();
1262 }
1263
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_pending_queue_processed(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  // NOTE(review): is_not_c1_osr_compilable is not cleared here although
  // unlink_method clears it — confirm this asymmetry is intentional.
  set_on_stack_flag(false);
  set_has_upcall_on_method_entry(false);
  set_has_upcall_on_method_exit(false);
}
1280 #endif
1281
1282 // Called when the method_holder is getting linked. Setup entrypoints so the method
1283 // is ready to be called from interpreter, compiler, and vtables.
1284 void Method::link_method(const methodHandle& h_method, TRAPS) {
1285 if (log_is_enabled(Info, perf, class, link)) {
1286 ClassLoader::perf_ik_link_methods_count()->inc();
1287 }
1288
1289 // If the code cache is full, we may reenter this function for the
1290 // leftover methods that weren't linked.
1291 if (adapter() != nullptr) {
1292 if (adapter()->is_shared()) {
1293 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1294 } else {
1295 return;
1296 }
1297 }
1298 assert( _code == nullptr, "nothing compiled yet" );
1324 if (_adapter == nullptr) {
1325 (void) make_adapters(h_method, CHECK);
1326 assert(adapter()->is_linked(), "Adapter must have been linked");
1327 }
1328
1329 // ONLY USE the h_method now as make_adapter may have blocked
1330
1331 if (h_method->is_continuation_native_intrinsic()) {
1332 _from_interpreted_entry = nullptr;
1333 _from_compiled_entry = nullptr;
1334 _i2i_entry = nullptr;
1335 if (Continuations::enabled()) {
1336 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1337 AdapterHandlerLibrary::create_native_wrapper(h_method);
1338 if (!h_method->has_compiled_code()) {
1339 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1340 }
1341 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1342 }
1343 }
1344 if (_preload_code != nullptr && !_aot_code_entry->not_entrant()) {
1345 MutexLocker ml(NMethodState_lock, Mutex::_no_safepoint_check_flag);
1346 set_code(h_method, _preload_code);
1347 assert(((nmethod*)_preload_code)->aot_code_entry() == _aot_code_entry, "sanity");
1348 }
1349 }
1350
// Create (or fetch the cached/shared) i2c/c2i adapter for 'mh' and install
// it. Returns the c2i entry. Throws OutOfMemoryError — or exits the VM if
// still initializing — when the code cache has no room for adapters.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  // Compiled callers go through the c2i adapter until compiled code exists.
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  return adapter->get_c2i_entry();
}
|