#include "interpreter/oopMapCache.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "logging/logTag.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "nmt/memTracker.hpp"
#include "oops/constMethod.hpp"
#include "oops/constantPool.hpp"
#include "oops/klass.inline.hpp"
#include "oops/method.inline.hpp"
#include "oops/methodData.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomic.hpp"
#include "runtime/continuationEntry.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/init.hpp"
#include "runtime/java.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/perfData.hpp"
#include "runtime/relocator.hpp"
#include "runtime/safepointVerifiers.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/signature.hpp"
#include "runtime/threads.hpp"
#include "runtime/vm_version.hpp"
#include "utilities/align.hpp"
#include "utilities/quickSort.hpp"
#include "utilities/vmError.hpp"
// ...
}

address Method::get_c2i_no_clinit_check_entry() {
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}

char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}

char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}

char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
  const char* klass_name = klass->external_name();
  int klass_name_len  = (int)strlen(klass_name);
  int method_name_len = method_name->utf8_length();
  int len             = klass_name_len + 1 + method_name_len + signature->utf8_length();
  char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
  strcpy(dest, klass_name);
  dest[klass_name_len] = '.';
  strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
  strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
  dest[len] = 0;
  return dest;
}
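
// Example (illustrative): for java.lang.String::charAt this produces
// "java.lang.String.charAt(I)C" -- the external class name, a '.', the
// method name, then the raw signature -- in a resource-allocated buffer of
// exactly len + 1 bytes.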

char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
// ...

address Method::bcp_from(address bcp) const {
  if (is_native() && bcp == nullptr) {
    return code_base();
  } else {
    return bcp;
  }
}

int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}
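
// Worked example (assuming a 64-bit VM, BytesPerWord == 8): a native method
// needs 2 * sizeof(address*) == 16 extra bytes, i.e. two extra words, for
// the native_function and signature_handler pointers appended to the layout.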

Symbol* Method::klass_name() const {
  return method_holder()->name();
}

void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(cds)("Iter(Method): %p", this);

  if (!method_holder()->is_rewritten()) {
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}

#if INCLUDE_CDS
// Attempt to return method to original state. Clear any pointers
// (to objects outside the shared spaces). We won't be able to predict
// where they should point in a new JVM. Further initialize some
// entries now in order to allow them to be write protected later.

void Method::remove_unshareable_info() {
  unlink_method();
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}

void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
#endif

void Method::set_vtable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}

void Method::set_itable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_itable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context. The dumptime
// ...

  // Counting based on signed int counters tends to overflow with
  // longer-running workloads on fast machines. The counters under
  // consideration here, however, are limited in range by counting
  // logic. See InvocationCounter::count_limit for example.
  // No "overflow precautions" need to be implemented here.
  st->print_cr ("  interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count());
  st->print_cr ("  invocation_counter:           " INT32_FORMAT_W(11), invocation_count());
  st->print_cr ("  backedge_counter:             " INT32_FORMAT_W(11), backedge_count());

  if (method_data() != nullptr) {
    st->print_cr ("  decompile_count:              " UINT32_FORMAT_W(11), method_data()->decompile_count());
  }

#ifndef PRODUCT
  if (CountCompiledCalls) {
    st->print_cr ("  compiled_invocation_count:    " INT64_FORMAT_W(11), compiled_invocation_count());
  }
#endif
}

// Build a MethodData* object to hold profiling information collected on this
// method when requested.
void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
  // Do not profile the method if metaspace has hit an OOM previously
  // allocating profiling data. Callers clear pending exception so don't
  // add one here.
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return;
  }

  ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
  MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return;  // return the exception (which is cleared)
  }

  if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
    MetadataFactory::free_metadata(loader_data, method_data);
    return;
  }

  if (PrintMethodData && (Verbose || WizardMode)) {
    ResourceMark rm(THREAD);
    tty->print("build_profiling_method_data for ");
    method->print_name(tty);
    tty->cr();
    // At the end of the run, the MDO, full of data, will be dumped.
  }
}

MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call the metaspace allocation that doesn't throw an exception if the
    // current thread isn't a JavaThread, i.e. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  if (!mh->init_method_counters(counters)) {
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  return mh->method_counters();
}

bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  return Atomic::replace_if_null(&_method_counters, counters);
}
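
// Note: Atomic::replace_if_null is a compare-and-swap against nullptr, so
// when two threads race to install counters only one succeeds; the loser's
// allocation is freed by the caller (see build_method_counters above).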

void Method::set_exception_handler_entered(int handler_bci) {
  if (ProfileExceptionHandlers) {
    MethodData* mdo = method_data();
    if (mdo != nullptr) {
      BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
      handler_data.set_exception_handler_entered();
    }
  }
}

int Method::extra_stack_words() {
  // not an inline function, to avoid a header dependency on Interpreter
// ...

  return (is_static() ||
          method_holder()->major_version() < 51);
}

bool Method::is_static_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static. Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return name() == vmSymbols::class_initializer_name() &&
         has_valid_initializer_flags();
}

bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}

bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}

bool Method::is_object_wait0() const {
  return klass_name() == vmSymbols::java_lang_Object()
         && name() == vmSymbols::wait_name();
}

objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
  int length = method->checked_exceptions_length();
  if (length == 0) { // common case
    return objArrayHandle(THREAD, Universe::the_empty_class_array());
  } else {
    methodHandle h_this(THREAD, method);
    objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
    objArrayHandle mirrors(THREAD, m_oop);
    for (int i = 0; i < length; i++) {
      CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
      Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
      if (log_is_enabled(Warning, exceptions) &&
          !k->is_subclass_of(vmClasses::Throwable_klass())) {
        ResourceMark rm(THREAD);
        log_warning(exceptions)(
// ...

  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  // We need to check if either the _code or _from_compiled_code_entry_point
  // refer to this nmethod because there is a race in setting these two fields
  // in Method* as seen in bugid 4947125.
  if (code() == compare ||
      from_compiled_entry() == compare->verified_entry_point()) {
    clear_code();
  }
}

void Method::unlink_code() {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  clear_code();
}

#if INCLUDE_CDS
// Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  _code = nullptr;
  _adapter = nullptr;
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_interpreted_entry = nullptr;

  if (is_native()) {
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  clear_method_data();
  clear_method_counters();
  remove_unshareable_flags();
}

void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
}
#endif

// Called when the method_holder is getting linked. Setup entrypoints so the method
// is ready to be called from interpreter, compiler, and vtables.
void Method::link_method(const methodHandle& h_method, TRAPS) {
  if (log_is_enabled(Info, perf, class, link)) {
    ClassLoader::perf_ik_link_methods_count()->inc();
  }

  // If the code cache is full, we may reenter this function for the
  // leftover methods that weren't linked.
  if (adapter() != nullptr) {
    return;
  }
  assert(_code == nullptr, "nothing compiled yet");

  // Setup interpreter entrypoint
  assert(this == h_method(), "wrong h_method()");

  assert(adapter() == nullptr, "init'd to null");
  address entry = Interpreter::entry_for_method(h_method);
  assert(entry != nullptr, "interpreter entry must be non-null");
  // Sets both _i2i_entry and _from_interpreted_entry
  set_interpreter_entry(entry);

  // Don't overwrite already registered native entries.
  if (is_native() && !has_native_function()) {
    set_native_function(
      SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
      !native_bind_event_is_interesting);
  }

  // Setup compiler entrypoint. This is made eagerly, so we do not need
  // special handling of vtables. An alternative is to make adapters more
  // lazily by calling make_adapter() from from_compiled_entry() for the
  // normal calls. For vtable calls life gets more complicated. When a
  // call-site goes mega-morphic we need adapters in all methods which can be
  // called from the vtable. We need adapters on such methods that get loaded
  // later. Ditto for mega-morphic itable calls. If this proves to be a
  // problem we'll make these lazily later.
  (void) make_adapters(h_method, CHECK);

  // ONLY USE the h_method now as make_adapter may have blocked

  if (h_method->is_continuation_native_intrinsic()) {
    _from_interpreted_entry = nullptr;
    _from_compiled_entry = nullptr;
    _i2i_entry = nullptr;
    if (Continuations::enabled()) {
      assert(!Threads::is_vm_complete(), "should only be called during vm init");
      AdapterHandlerLibrary::create_native_wrapper(h_method);
      if (!h_method->has_compiled_code()) {
        THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
      }
      assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
    }
  }
}

address Method::make_adapters(const methodHandle& mh, TRAPS) {
  PerfTraceTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  return adapter->get_c2i_entry();
}
// ...

  // Finally, set up its entry points.
  assert(m->can_be_statically_bound(), "");
  m->set_vtable_index(Method::nonvirtual_vtable_index);
  m->link_method(m, CHECK_(empty));

  if (iid == vmIntrinsics::_linkToNative) {
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
  if (log_is_enabled(Debug, methodhandles)) {
    LogTarget(Debug, methodhandles) lt;
    LogStream ls(lt);
    m->print_on(&ls);
  }

  return m;
}

#if INCLUDE_CDS
void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
  m->link_method(m, CHECK);

  if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
}
#endif

Klass* Method::check_non_bcp_klass(Klass* klass) {
  if (klass != nullptr && klass->class_loader() != nullptr) {
    if (klass->is_objArray_klass()) {
      klass = ObjArrayKlass::cast(klass)->bottom_klass();
    }
    return klass;
  }
  return nullptr;
}


methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
                                         u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
  // ...

// ============================================================================

#include "interpreter/oopMapCache.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "logging/logTag.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "nmt/memTracker.hpp"
#include "oops/constMethod.hpp"
#include "oops/constantPool.hpp"
#include "oops/klass.inline.hpp"
#include "oops/method.inline.hpp"
#include "oops/methodData.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "oops/trainingData.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomic.hpp"
#include "runtime/continuationEntry.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/init.hpp"
#include "runtime/java.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/perfData.hpp"
#include "runtime/relocator.hpp"
#include "runtime/safepointVerifiers.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/signature.hpp"
#include "runtime/threads.hpp"
#include "runtime/vm_version.hpp"
#include "utilities/align.hpp"
#include "utilities/quickSort.hpp"
#include "utilities/vmError.hpp"
// ...
}

address Method::get_c2i_no_clinit_check_entry() {
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}

char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}

char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}

char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
  const char* klass_name = klass->external_name();
  int klass_name_len  = (int)strlen(klass_name);
  int method_name_len = method_name->utf8_length();
  int len             = klass_name_len + 1 + method_name_len + signature->utf8_length();
  char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
  strcpy(dest, klass_name);
  dest[klass_name_len] = '.';
  strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
  strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
  dest[len] = 0;
  return dest;
}

char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
// ...

address Method::bcp_from(address bcp) const {
  if (is_native() && bcp == nullptr) {
    return code_base();
  } else {
    return bcp;
  }
}
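
// Note: native methods have no bytecodes, so a nullptr bcp is mapped to
// code_base(), giving callers a stable in-method address to work with.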

int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}

Symbol* Method::klass_name() const {
  return method_holder()->name();
}

void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  LogStreamHandle(Trace, cds) lsh;
  if (lsh.is_enabled()) {
    lsh.print("Iter(Method): %p ", this);
    print_external_name(&lsh);
    lsh.cr();
  }
  // holder is null for MH intrinsic methods
  if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  if (CDSConfig::is_dumping_adapters()) {
    it->push(&_adapter);
  }
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
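
// Note (assumption based on the flag name): pushing _constMethod with
// MetaspaceClosure::_writable archives it writable for holders whose
// bytecodes have not been rewritten yet, while rewritten holders can share
// a read-only copy.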

#if INCLUDE_CDS
// Attempt to return method to original state. Clear any pointers
// (to objects outside the shared spaces). We won't be able to predict
// where they should point in a new JVM. Further initialize some
// entries now in order to allow them to be write protected later.

void Method::remove_unshareable_info() {
  unlink_method();
  if (CDSConfig::is_dumping_adapters()) {
    if (_adapter != nullptr) {
      _adapter->remove_unshareable_info();
    }
  } else {
    _adapter = nullptr;
  }
  if (method_data() != nullptr) {
    method_data()->remove_unshareable_info();
  }
  if (method_counters() != nullptr) {
    method_counters()->remove_unshareable_info();
  }
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}

void Method::restore_adapter(TRAPS) {
  if (_adapter != nullptr) {
    _adapter->restore_unshareable_info(CHECK);
    _from_compiled_entry = _adapter->get_c2i_entry();
  }
}
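
// Note: _from_compiled_entry is refreshed from the adapter because the
// archived c2i entry address is presumably only valid once the adapter's
// code has been restored at runtime.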

void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  restore_adapter(CHECK);
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
  assert(!pending_queue_processed(), "method's pending_queue_processed flag should not be set");
}
#endif

void Method::set_vtable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}

void Method::set_itable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_itable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context. The dumptime
// ...

  // Counting based on signed int counters tends to overflow with
  // longer-running workloads on fast machines. The counters under
  // consideration here, however, are limited in range by counting
  // logic. See InvocationCounter::count_limit for example.
  // No "overflow precautions" need to be implemented here.
  st->print_cr ("  interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count());
  st->print_cr ("  invocation_counter:           " INT32_FORMAT_W(11), invocation_count());
  st->print_cr ("  backedge_counter:             " INT32_FORMAT_W(11), backedge_count());

  if (method_data() != nullptr) {
    st->print_cr ("  decompile_count:              " UINT32_FORMAT_W(11), method_data()->decompile_count());
  }

#ifndef PRODUCT
  if (CountCompiledCalls) {
    st->print_cr ("  compiled_invocation_count:    " INT64_FORMAT_W(11), compiled_invocation_count());
  }
#endif
}

MethodTrainingData* Method::training_data_or_null() const {
  MethodCounters* mcs = method_counters();
  if (mcs == nullptr) {
    return nullptr;
  } else {
    MethodTrainingData* mtd = mcs->method_training_data();
    if (mtd == mcs->method_training_data_sentinel()) {
      return nullptr;
    }
    return mtd;
  }
}
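
// Note: method_training_data_sentinel() marks "not yet initialized"; callers
// of training_data_or_null() see nullptr for both the unset and the sentinel
// case, so the sentinel value itself never escapes.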

bool Method::init_training_data(MethodTrainingData* td) {
  MethodCounters* mcs = method_counters();
  if (mcs == nullptr) {
    return false;
  } else {
    return mcs->init_method_training_data(td);
  }
}

bool Method::install_training_method_data(const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::find(method);
  if (mtd != nullptr && mtd->final_profile() != nullptr) {
    Atomic::replace_if_null(&method->_method_data, mtd->final_profile());
    return true;
  }
  return false;
}
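
// Sketch of the flow suggested by the code above: a final MethodData profile
// recorded by a previous (training) run is installed directly via CAS;
// otherwise build_profiling_method_data() below allocates a fresh MDO.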

// Build a MethodData* object to hold profiling information collected on this
// method when requested.
void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
  if (install_training_method_data(method)) {
    return;
  }
  // Do not profile the method if metaspace has hit an OOM previously
  // allocating profiling data. Callers clear pending exception so don't
  // add one here.
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return;
  }

  ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
  MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return;  // return the exception (which is cleared)
  }

  if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
    MetadataFactory::free_metadata(loader_data, method_data);
    return;
  }

  if (ForceProfiling && TrainingData::need_data()) {
    MethodTrainingData* mtd = MethodTrainingData::make(method, false);
    guarantee(mtd != nullptr, "");
  }

  if (PrintMethodData) {
    ResourceMark rm(THREAD);
    tty->print("build_profiling_method_data for ");
    method->print_name(tty);
    tty->cr();
    // At the end of the run, the MDO, full of data, will be dumped.
  }
}

MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call the metaspace allocation that doesn't throw an exception if the
    // current thread isn't a JavaThread, i.e. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  if (!mh->init_method_counters(counters)) {
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  if (ForceProfiling && TrainingData::need_data()) {
    MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
    guarantee(mtd != nullptr, "");
  }

  return mh->method_counters();
}

bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  return Atomic::replace_if_null(&_method_counters, counters);
}

void Method::set_exception_handler_entered(int handler_bci) {
  if (ProfileExceptionHandlers) {
    MethodData* mdo = method_data();
    if (mdo != nullptr) {
      BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
      handler_data.set_exception_handler_entered();
    }
  }
}

int Method::extra_stack_words() {
  // not an inline function, to avoid a header dependency on Interpreter
// ...

  return (is_static() ||
          method_holder()->major_version() < 51);
}

bool Method::is_static_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static. Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return name() == vmSymbols::class_initializer_name() &&
         has_valid_initializer_flags();
}

bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}

bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}

bool Method::code_has_clinit_barriers() const {
  nmethod* nm = code();
  return (nm != nullptr) && nm->has_clinit_barriers();
}

bool Method::is_object_wait0() const {
  return klass_name() == vmSymbols::java_lang_Object()
         && name() == vmSymbols::wait_name();
}

objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
  int length = method->checked_exceptions_length();
  if (length == 0) { // common case
    return objArrayHandle(THREAD, Universe::the_empty_class_array());
  } else {
    methodHandle h_this(THREAD, method);
    objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
    objArrayHandle mirrors(THREAD, m_oop);
    for (int i = 0; i < length; i++) {
      CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
      Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
      if (log_is_enabled(Warning, exceptions) &&
          !k->is_subclass_of(vmClasses::Throwable_klass())) {
        ResourceMark rm(THREAD);
        log_warning(exceptions)(
// ...

  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  // We need to check if either the _code or _from_compiled_code_entry_point
  // refer to this nmethod because there is a race in setting these two fields
  // in Method* as seen in bugid 4947125.
  if (code() == compare ||
      from_compiled_entry() == compare->verified_entry_point()) {
    clear_code();
  }
}

void Method::unlink_code() {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  clear_code();
}

#if INCLUDE_CDS
// Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  _code = nullptr;
  if (!CDSConfig::is_dumping_adapters() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) {
    _adapter = nullptr;
  }
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_interpreted_entry = nullptr;

  if (is_native()) {
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  clear_method_data();
  clear_method_counters();
  clear_is_not_c1_compilable();
  clear_is_not_c1_osr_compilable();
  clear_is_not_c2_compilable();
  clear_is_not_c2_osr_compilable();
  clear_queued_for_compilation();
  set_pending_queue_processed(false);
  remove_unshareable_flags();
}
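
// Note: when adapters are being archived (is_dumping_adapters), the adapter
// pointer survives unlinking so the shared entry can be re-linked at runtime;
// abstract method adapters are dropped unconditionally.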

void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_pending_queue_processed(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
  set_has_upcall_on_method_entry(false);
  set_has_upcall_on_method_exit(false);
}
#endif

// Called when the method_holder is getting linked. Setup entrypoints so the method
// is ready to be called from interpreter, compiler, and vtables.
void Method::link_method(const methodHandle& h_method, TRAPS) {
  if (log_is_enabled(Info, perf, class, link)) {
    ClassLoader::perf_ik_link_methods_count()->inc();
  }

  // If the code cache is full, we may reenter this function for the
  // leftover methods that weren't linked.
  if (adapter() != nullptr && !adapter()->is_shared()) {
    return;
  }
  assert(_code == nullptr, "nothing compiled yet");

  // Setup interpreter entrypoint
  assert(this == h_method(), "wrong h_method()");

  assert(adapter() == nullptr || adapter()->is_linked(), "init'd to null or restored from cache");
  address entry = Interpreter::entry_for_method(h_method);
  assert(entry != nullptr, "interpreter entry must be non-null");
  // Sets both _i2i_entry and _from_interpreted_entry
  set_interpreter_entry(entry);

  // Don't overwrite already registered native entries.
  if (is_native() && !has_native_function()) {
    set_native_function(
      SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
      !native_bind_event_is_interesting);
  }

  // Setup compiler entrypoint. This is made eagerly, so we do not need
  // special handling of vtables. An alternative is to make adapters more
  // lazily by calling make_adapter() from from_compiled_entry() for the
  // normal calls. For vtable calls life gets more complicated. When a
  // call-site goes mega-morphic we need adapters in all methods which can be
  // called from the vtable. We need adapters on such methods that get loaded
  // later. Ditto for mega-morphic itable calls. If this proves to be a
  // problem we'll make these lazily later.
  if (_adapter == nullptr) {
    (void) make_adapters(h_method, CHECK);
  }

  // ONLY USE the h_method now as make_adapter may have blocked

  if (h_method->is_continuation_native_intrinsic()) {
    _from_interpreted_entry = nullptr;
    _from_compiled_entry = nullptr;
    _i2i_entry = nullptr;
    if (Continuations::enabled()) {
      assert(!Threads::is_vm_complete(), "should only be called during vm init");
      AdapterHandlerLibrary::create_native_wrapper(h_method);
      if (!h_method->has_compiled_code()) {
        THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
      }
      assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
    }
  }
  if (_preload_code != nullptr) {
    MutexLocker ml(NMethodState_lock, Mutex::_no_safepoint_check_flag);
    set_code(h_method, _preload_code);
    assert(((nmethod*)_preload_code)->scc_entry() == _scc_entry, "sanity");
  }
}
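
// Note (assumption from context): _preload_code is compiled code produced in
// an earlier (training) run and archived alongside the method; installing it
// here lets the method enter compiled code on its first call, and the
// scc_entry assert cross-checks it against the archived code cache entry.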

address Method::make_adapters(const methodHandle& mh, TRAPS) {
  PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  return adapter->get_c2i_entry();
}
// ...

  // Finally, set up its entry points.
  assert(m->can_be_statically_bound(), "");
  m->set_vtable_index(Method::nonvirtual_vtable_index);
  m->link_method(m, CHECK_(empty));

  if (iid == vmIntrinsics::_linkToNative) {
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
  if (log_is_enabled(Debug, methodhandles)) {
    LogTarget(Debug, methodhandles) lt;
    LogStream ls(lt);
    m->print_on(&ls);
  }

  return m;
}

#if INCLUDE_CDS
void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
  // restore_adapter() re-links the archived adapter and refreshes
  // _from_compiled_entry from its c2i entry.
  m->restore_adapter(CHECK);
  m->link_method(m, CHECK);

  if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
}
#endif

Klass* Method::check_non_bcp_klass(Klass* klass) {
  if (klass != nullptr && klass->class_loader() != nullptr) {
    if (klass->is_objArray_klass()) {
      klass = ObjArrayKlass::cast(klass)->bottom_klass();
    }
    return klass;
  }
  return nullptr;
}


methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
                                         u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
  // ...