#include "interpreter/oopMapCache.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "logging/logTag.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "nmt/memTracker.hpp"
#include "oops/constMethod.hpp"
#include "oops/constantPool.hpp"
#include "oops/klass.inline.hpp"
#include "oops/method.inline.hpp"
#include "oops/methodData.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomic.hpp"
#include "runtime/continuationEntry.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/init.hpp"
#include "runtime/java.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/perfData.hpp"
#include "runtime/relocator.hpp"
#include "runtime/safepointVerifiers.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/signature.hpp"
#include "runtime/threads.hpp"
#include "runtime/vm_version.hpp"
#include "utilities/align.hpp"
#include "utilities/quickSort.hpp"
#include "utilities/vmError.hpp"
}

address Method::get_c2i_no_clinit_check_entry() {
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}

char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}

char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}

char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
  const char* klass_name = klass->external_name();
  int klass_name_len = (int)strlen(klass_name);
  int method_name_len = method_name->utf8_length();
  int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
  char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
  strcpy(dest, klass_name);
  dest[klass_name_len] = '.';
  strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
  strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
  dest[len] = 0;
  return dest;
}

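// Variant that formats into a caller-supplied buffer; the result is silently
// truncated if the buffer is too small.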
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
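
// A bcp of nullptr is only legal for native methods, which have no bytecodes;
// normalize it to code_base() so callers always get a valid pointer.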
address Method::bcp_from(address bcp) const {
  if (is_native() && bcp == nullptr) {
    return code_base();
  } else {
    return bcp;
  }
}

int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}

Symbol* Method::klass_name() const {
  return method_holder()->name();
}

void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(cds)("Iter(Method): %p", this);

  if (!method_holder()->is_rewritten()) {
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}

#if INCLUDE_CDS
// Attempt to return method to original state. Clear any pointers
// (to objects outside the shared spaces). We won't be able to predict
// where they should point in a new JVM. Further initialize some
// entries now in order to allow them to be write protected later.

void Method::remove_unshareable_info() {
  unlink_method();
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}

void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
#endif

void Method::set_vtable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}

void Method::set_itable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_itable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context. The dumptime
    // itable index should be the same as the runtime index.
  // Counting based on signed int counters tends to overflow with
  // longer-running workloads on fast machines. The counters under
  // consideration here, however, are limited in range by counting
  // logic. See InvocationCounter::count_limit for example.
  // No "overflow precautions" need to be implemented here.
  st->print_cr (" interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count());
  st->print_cr (" invocation_counter: " INT32_FORMAT_W(11), invocation_count());
  st->print_cr (" backedge_counter: " INT32_FORMAT_W(11), backedge_count());

  if (method_data() != nullptr) {
    st->print_cr (" decompile_count: " UINT32_FORMAT_W(11), method_data()->decompile_count());
  }

#ifndef PRODUCT
  if (CountCompiledCalls) {
    st->print_cr (" compiled_invocation_count: " INT64_FORMAT_W(11), compiled_invocation_count());
  }
#endif
}

// Build a MethodData* object to hold profiling information collected on this
// method when requested.
void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
  // Do not profile the method if metaspace has hit an OOM previously
  // allocating profiling data. Callers clear pending exception so don't
  // add one here.
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return;
  }

  ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
  MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return; // return the exception (which is cleared)
  }

  if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
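    // Lost the installation race: another thread published its MethodData
    // first, so free ours and use the winner's.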
    MetadataFactory::free_metadata(loader_data, method_data);
    return;
  }

  if (PrintMethodData && (Verbose || WizardMode)) {
    ResourceMark rm(THREAD);
    tty->print("build_profiling_method_data for ");
    method->print_name(tty);
    tty->cr();
    // At the end of the run, the MDO, full of data, will be dumped.
  }
}

MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call the metaspace allocation variant that doesn't throw an exception
    // if the current thread isn't a JavaThread, i.e. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  if (!mh->init_method_counters(counters)) {
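    // Another thread won the race to install MethodCounters; free ours and
    // return the winner's below.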
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  return mh->method_counters();
}

bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  return Atomic::replace_if_null(&_method_counters, counters);
}

void Method::set_exception_handler_entered(int handler_bci) {
  if (ProfileExceptionHandlers) {
    MethodData* mdo = method_data();
    if (mdo != nullptr) {
      BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
      handler_data.set_exception_handler_entered();
    }
  }
}

int Method::extra_stack_words() {
  // not an inline function, to avoid a header dependency on Interpreter
  return (is_static() ||
          method_holder()->major_version() < 51);
}

bool Method::is_static_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static. Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return name() == vmSymbols::class_initializer_name() &&
         has_valid_initializer_flags();
}

bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}

bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}

objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
  int length = method->checked_exceptions_length();
  if (length == 0) { // common case
    return objArrayHandle(THREAD, Universe::the_empty_class_array());
  } else {
    methodHandle h_this(THREAD, method);
    objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
    objArrayHandle mirrors(THREAD, m_oop);
    for (int i = 0; i < length; i++) {
      // Recompute the raw table pointer on each iteration; it is not GC-safe.
      CheckedExceptionElement* table = h_this->checked_exceptions_start();
      Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
      if (log_is_enabled(Warning, exceptions) &&
          !k->is_subclass_of(vmClasses::Throwable_klass())) {
        ResourceMark rm(THREAD);
        log_warning(exceptions)(
          "Class %s in throws clause of method %s is not a subtype of class java.lang.Throwable",
          k->external_name(), method->external_name());
      }
      mirrors->obj_at_put(i, k->java_mirror());
    }
}

#if INCLUDE_CDS
// Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  _code = nullptr;
  _adapter = nullptr;
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_interpreted_entry = nullptr;

  if (is_native()) {
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  clear_method_data();
  clear_method_counters();
  remove_unshareable_flags();
}

void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
}
#endif

// Called when the method_holder is getting linked. Setup entrypoints so the method
// is ready to be called from interpreter, compiler, and vtables.
void Method::link_method(const methodHandle& h_method, TRAPS) {
  if (log_is_enabled(Info, perf, class, link)) {
    ClassLoader::perf_ik_link_methods_count()->inc();
  }

  // If the code cache is full, we may reenter this function for the
  // leftover methods that weren't linked.
  if (adapter() != nullptr) {
    return;
  }
  assert(_code == nullptr, "nothing compiled yet");
  // called from the vtable. We need adapters on such methods that get loaded
  // later. Ditto for mega-morphic itable calls. If this proves to be a
  // problem we'll make these lazily later.
  (void) make_adapters(h_method, CHECK);

  // ONLY USE the h_method now, as make_adapters may have blocked

  if (h_method->is_continuation_native_intrinsic()) {
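    // Clear the entry points: a continuation intrinsic is only callable
    // through the compiled native wrapper generated below.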
    _from_interpreted_entry = nullptr;
    _from_compiled_entry = nullptr;
    _i2i_entry = nullptr;
    if (Continuations::enabled()) {
      assert(!Threads::is_vm_complete(), "should only be called during vm init");
      AdapterHandlerLibrary::create_native_wrapper(h_method);
      if (!h_method->has_compiled_code()) {
        THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
      }
      assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
    }
  }
}

address Method::make_adapters(const methodHandle& mh, TRAPS) {
  PerfTraceTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  return adapter->get_c2i_entry();
}
  assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");

  log_info(methodhandles)("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());

  // invariant: cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
  name->increment_refcount();
  signature->increment_refcount();

  int cp_length = _imcp_limit;
  ClassLoaderData* loader_data = holder->class_loader_data();
  constantPoolHandle cp;
  {
    ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
    cp = constantPoolHandle(THREAD, cp_oop);
  }
  cp->copy_fields(holder->constants());
  cp->set_pool_holder(holder);
  cp->symbol_at_put(_imcp_invoke_name, name);
  cp->symbol_at_put(_imcp_invoke_signature, signature);
  cp->set_has_preresolution();

  // decide on access bits: public or not?
  int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
  bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
  if (must_be_static) flags_bits |= JVM_ACC_STATIC;
  assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");

  methodHandle m;
  {
    InlineTableSizes sizes;
    Method* m_oop = Method::allocate(loader_data, 0,
                                     accessFlags_from(flags_bits), &sizes,
                                     ConstMethod::NORMAL,
                                     name,
                                     CHECK_(empty));
    m = methodHandle(THREAD, m_oop);
  }
  m->set_constants(cp());
  m->set_name_index(_imcp_invoke_name);
  m->set_signature_index(_imcp_invoke_signature);
  assert(m->intrinsic_id() == iid, "correctly predicted iid");
#endif //ASSERT

  // Finally, set up its entry points.
  assert(m->can_be_statically_bound(), "");
  m->set_vtable_index(Method::nonvirtual_vtable_index);
  m->link_method(m, CHECK_(empty));

  if (iid == vmIntrinsics::_linkToNative) {
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
  if (log_is_enabled(Debug, methodhandles)) {
    LogTarget(Debug, methodhandles) lt;
    LogStream ls(lt);
    m->print_on(&ls);
  }

  return m;
}

Klass* Method::check_non_bcp_klass(Klass* klass) {
  if (klass != nullptr && klass->class_loader() != nullptr) {
    if (klass->is_objArray_klass()) {
      klass = ObjArrayKlass::cast(klass)->bottom_klass();
    }
    return klass;
  }
  return nullptr;
}


methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
                                         u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
  // Code below does not work for native methods - they should never get rewritten anyway
  assert(!m->is_native(), "cannot rewrite native methods");
  // Allocate new Method*
  AccessFlags flags = m->access_flags();

  ConstMethod* cm = m->constMethod();
  int checked_exceptions_len = cm->checked_exceptions_length();
  int localvariable_len = cm->localvariable_table_length();
  } else {
    return ((mcs != nullptr) ? mcs->invocation_counter()->count() : 0) +
           ((mdo != nullptr) ? mdo->invocation_counter()->count() : 0);
  }
}

int Method::backedge_count() const {
  MethodCounters* mcs = method_counters();
  MethodData* mdo = method_data();
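  // A set carry bit means the counter saturated at some point, so report the
  // limit rather than a possibly wrapped sum.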
  if (((mcs != nullptr) ? mcs->backedge_counter()->carry() : false) ||
      ((mdo != nullptr) ? mdo->backedge_counter()->carry() : false)) {
    return InvocationCounter::count_limit;
  } else {
    return ((mcs != nullptr) ? mcs->backedge_counter()->count() : 0) +
           ((mdo != nullptr) ? mdo->backedge_counter()->count() : 0);
  }
}

int Method::highest_comp_level() const {
  const MethodCounters* mcs = method_counters();
  if (mcs != nullptr) {
    return mcs->highest_comp_level();
  } else {
    return CompLevel_none;
  }
}

int Method::highest_osr_comp_level() const {
  const MethodCounters* mcs = method_counters();
  if (mcs != nullptr) {
    return mcs->highest_osr_comp_level();
  } else {
    return CompLevel_none;
  }
}

void Method::set_highest_comp_level(int level) {
  MethodCounters* mcs = method_counters();
  if (mcs != nullptr) {
    mcs->set_highest_comp_level(level);
  }
}
|
#include "interpreter/oopMapCache.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "logging/logTag.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "nmt/memTracker.hpp"
#include "oops/constMethod.hpp"
#include "oops/constantPool.hpp"
#include "oops/klass.inline.hpp"
#include "oops/method.inline.hpp"
#include "oops/methodData.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "oops/trainingData.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomic.hpp"
#include "runtime/continuationEntry.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/init.hpp"
#include "runtime/java.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/perfData.hpp"
#include "runtime/relocator.hpp"
#include "runtime/safepointVerifiers.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/signature.hpp"
#include "runtime/threads.hpp"
#include "runtime/vm_version.hpp"
#include "utilities/align.hpp"
#include "utilities/quickSort.hpp"
#include "utilities/vmError.hpp"
}

address Method::get_c2i_no_clinit_check_entry() {
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}

char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}

char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}

char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
  const char* klass_name = klass->external_name();
  int klass_name_len = (int)strlen(klass_name);
  int method_name_len = method_name->utf8_length();
  int len = klass_name_len + 2 + method_name_len + signature->utf8_length();
  char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
  strcpy(dest, klass_name);
  dest[klass_name_len + 0] = ':';
  dest[klass_name_len + 1] = ':';
  strcpy(&dest[klass_name_len + 2], method_name->as_C_string());
  strcpy(&dest[klass_name_len + 2 + method_name_len], signature->as_C_string());
  dest[len] = 0;
  return dest;
}

char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
  Symbol* klass_name = klass->name();
  klass_name->as_klass_external_name(buf, size);
  int len = (int)strlen(buf);

  if (len < size - 1) {
    buf[len++] = '.';

    method_name->as_C_string(&(buf[len]), size - len);
    len = (int)strlen(buf);

    signature->as_C_string(&(buf[len]), size - len);
  }

  return buf;
}
address Method::bcp_from(address bcp) const {
  if (is_native() && bcp == nullptr) {
    return code_base();
  } else {
    return bcp;
  }
}

int Method::size(bool is_native) {
  // If native, then include pointers for native_function and signature_handler
  int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
  int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
  return align_metadata_size(header_size() + extra_words);
}

Symbol* Method::klass_name() const {
  return method_holder()->name();
}

void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  LogStreamHandle(Trace, cds) lsh;
  if (lsh.is_enabled()) {
    lsh.print("Iter(Method): %p ", this);
    print_external_name(&lsh);
    lsh.cr();
  }
  // The holder is null for MH intrinsic methods.
  if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}

#if INCLUDE_CDS
// Attempt to return method to original state. Clear any pointers
// (to objects outside the shared spaces). We won't be able to predict
// where they should point in a new JVM. Further initialize some
// entries now in order to allow them to be write protected later.

void Method::remove_unshareable_info() {
  unlink_method();
  if (method_data() != nullptr) {
    method_data()->remove_unshareable_info();
  }
  if (method_counters() != nullptr) {
    method_counters()->remove_unshareable_info();
  }
  JFR_ONLY(REMOVE_METHOD_ID(this);)
}

void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
  assert(!pending_queue_processed(), "method's pending_queue_processed flag should not be set");
}
#endif

void Method::set_vtable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}

void Method::set_itable_index(int index) {
  if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_itable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context. The dumptime
    // itable index should be the same as the runtime index.
  // Counting based on signed int counters tends to overflow with
  // longer-running workloads on fast machines. The counters under
  // consideration here, however, are limited in range by counting
  // logic. See InvocationCounter::count_limit for example.
  // No "overflow precautions" need to be implemented here.
  st->print_cr (" interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count());
  st->print_cr (" invocation_counter: " INT32_FORMAT_W(11), invocation_count());
  st->print_cr (" backedge_counter: " INT32_FORMAT_W(11), backedge_count());

  if (method_data() != nullptr) {
    st->print_cr (" decompile_count: " UINT32_FORMAT_W(11), method_data()->decompile_count());
  }

#ifndef PRODUCT
  if (CountCompiledCalls) {
    st->print_cr (" compiled_invocation_count: " INT64_FORMAT_W(11), compiled_invocation_count());
  }
#endif
}

MethodTrainingData* Method::training_data_or_null() const {
  MethodCounters* mcs = method_counters();
  if (mcs == nullptr) {
    return nullptr;
  } else {
    return mcs->method_training_data();
  }
}

bool Method::init_training_data(MethodTrainingData* tdata) {
  MethodCounters* mcs = method_counters();
  if (mcs == nullptr) {
    return false;
  } else {
    return mcs->init_method_training_data(tdata);
  }
}

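// If a recorded training run left a complete profile for this method, adopt
// that archived MethodData instead of allocating a fresh one.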
bool Method::install_training_method_data(const methodHandle& method) {
  MethodTrainingData* mtd = MethodTrainingData::find(method);
  if (mtd != nullptr && mtd->has_holder() && mtd->final_profile() != nullptr &&
      mtd->holder() == method() && mtd->final_profile()->method() == method()) { // FIXME
    Atomic::replace_if_null(&method->_method_data, mtd->final_profile());
    return true;
  }
  return false;
}

// Build a MethodData* object to hold profiling information collected on this
// method when requested.
void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
  if (install_training_method_data(method)) {
    return;
  }
  // Do not profile the method if metaspace has hit an OOM previously
  // allocating profiling data. Callers clear pending exception so don't
  // add one here.
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return;
  }

  ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
  MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return; // return the exception (which is cleared)
  }

  if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
    MetadataFactory::free_metadata(loader_data, method_data);
    return;
  }

  /*
  LogStreamHandle(Info, mdo) lsh;
  if (lsh.is_enabled()) {
    ResourceMark rm(THREAD);
    lsh.print("build_profiling_method_data for ");
    method->print_name(&lsh);
    lsh.cr();
  }
  */
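
  // When training-data collection is enabled, eagerly create the
  // MethodTrainingData entry to go with the freshly installed MethodData.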
  if (ForceProfiling && TrainingData::need_data()) {
    MethodTrainingData* mtd = MethodTrainingData::make(method, false);
    guarantee(mtd != nullptr, "");
  }
  if (PrintMethodData) {
    ResourceMark rm(THREAD);
    tty->print("build_profiling_method_data for ");
    method->print_name(tty);
    tty->cr();
    // At the end of the run, the MDO, full of data, will be dumped.
  }
}

MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
  // Do not profile the method if metaspace has hit an OOM previously
  if (ClassLoaderDataGraph::has_metaspace_oom()) {
    return nullptr;
  }

  methodHandle mh(current, m);
  MethodCounters* counters;
  if (current->is_Java_thread()) {
    JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
    // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
    // if needed.
    counters = MethodCounters::allocate_with_exception(mh, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      CLEAR_PENDING_EXCEPTION;
    }
  } else {
    // Call the metaspace allocation variant that doesn't throw an exception
    // if the current thread isn't a JavaThread, i.e. the VMThread.
    counters = MethodCounters::allocate_no_exception(mh);
  }

  if (counters == nullptr) {
    CompileBroker::log_metaspace_failure();
    ClassLoaderDataGraph::set_metaspace_oom(true);
    return nullptr;
  }

  if (!mh->init_method_counters(counters)) {
    MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
  }

  if (ForceProfiling && TrainingData::need_data()) {
    MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
    guarantee(mtd != nullptr, "");
  }

  return mh->method_counters();
}

bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  return Atomic::replace_if_null(&_method_counters, counters);
}

void Method::set_exception_handler_entered(int handler_bci) {
  if (ProfileExceptionHandlers) {
    MethodData* mdo = method_data();
    if (mdo != nullptr) {
      BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
      handler_data.set_exception_handler_entered();
    }
  }
}

int Method::extra_stack_words() {
  // not an inline function, to avoid a header dependency on Interpreter
  return (is_static() ||
          method_holder()->major_version() < 51);
}

bool Method::is_static_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static. Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return name() == vmSymbols::class_initializer_name() &&
         has_valid_initializer_flags();
}

bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}

bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}

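// Whether the currently installed nmethod was compiled with class
// initialization barriers, i.e. it can be published before the holder class
// finishes initialization.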
bool Method::code_has_clinit_barriers() const {
  nmethod* nm = code();
  return (nm != nullptr) && nm->has_clinit_barriers();
}

objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
  int length = method->checked_exceptions_length();
  if (length == 0) { // common case
    return objArrayHandle(THREAD, Universe::the_empty_class_array());
  } else {
    methodHandle h_this(THREAD, method);
    objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
    objArrayHandle mirrors(THREAD, m_oop);
    for (int i = 0; i < length; i++) {
      // Recompute the raw table pointer on each iteration; it is not GC-safe.
      CheckedExceptionElement* table = h_this->checked_exceptions_start();
      Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
      if (log_is_enabled(Warning, exceptions) &&
          !k->is_subclass_of(vmClasses::Throwable_klass())) {
        ResourceMark rm(THREAD);
        log_warning(exceptions)(
          "Class %s in throws clause of method %s is not a subtype of class java.lang.Throwable",
          k->external_name(), method->external_name());
      }
      mirrors->obj_at_put(i, k->java_mirror());
    }
}

#if INCLUDE_CDS
// Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  _code = nullptr;
  _adapter = nullptr;
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_interpreted_entry = nullptr;

  if (is_native()) {
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  clear_method_data();
  clear_method_counters();
  clear_is_not_c1_compilable();
  clear_is_not_c1_osr_compilable();
  clear_is_not_c2_compilable();
  clear_is_not_c2_osr_compilable();
  clear_queued_for_compilation();
  set_pending_queue_processed(false);
  remove_unshareable_flags();
}

void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_pending_queue_processed(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
}
#endif
1251
1252 // Called when the method_holder is getting linked. Setup entrypoints so the method
1253 // is ready to be called from interpreter, compiler, and vtables.
1254 void Method::link_method(const methodHandle& h_method, TRAPS) {
1255 if (log_is_enabled(Info, perf, class, link)) {
1256 ClassLoader::perf_ik_link_methods_count()->inc();
1257 }
1258
1259 // If the code cache is full, we may reenter this function for the
1260 // leftover methods that weren't linked.
1261 if (adapter() != nullptr) {
1262 return;
1263 }
1264 assert( _code == nullptr, "nothing compiled yet" );
  // called from the vtable. We need adapters on such methods that get loaded
  // later. Ditto for mega-morphic itable calls. If this proves to be a
  // problem we'll make these lazily later.
  (void) make_adapters(h_method, CHECK);

  // ONLY USE the h_method now, as make_adapters may have blocked

  if (h_method->is_continuation_native_intrinsic()) {
    _from_interpreted_entry = nullptr;
    _from_compiled_entry = nullptr;
    _i2i_entry = nullptr;
    if (Continuations::enabled()) {
      assert(!Threads::is_vm_complete(), "should only be called during vm init");
      AdapterHandlerLibrary::create_native_wrapper(h_method);
      if (!h_method->has_compiled_code()) {
        THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
      }
      assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
    }
  }
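  // Install code preloaded from the cached-code archive (SCC), if any;
  // installing an nmethod requires the NMethodState_lock.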
  if (_preload_code != nullptr) {
    MutexLocker ml(NMethodState_lock, Mutex::_no_safepoint_check_flag);
    set_code(h_method, _preload_code);
    assert(((nmethod*)_preload_code)->scc_entry() == _scc_entry, "sanity");
  }
}

address Method::make_adapters(const methodHandle& mh, TRAPS) {
  PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  mh->_from_compiled_entry = adapter->get_c2i_entry();
  return adapter->get_c2i_entry();
}
  assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");

  log_info(methodhandles)("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());

  // invariant: cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
  name->increment_refcount();
  signature->increment_refcount();

  int cp_length = _imcp_limit;
  ClassLoaderData* loader_data = holder->class_loader_data();
  constantPoolHandle cp;
  {
    ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
    cp = constantPoolHandle(THREAD, cp_oop);
  }
  cp->copy_fields(holder->constants());
  cp->set_pool_holder(holder);
  cp->symbol_at_put(_imcp_invoke_name, name);
  cp->symbol_at_put(_imcp_invoke_signature, signature);
  cp->set_has_preresolution();
  cp->set_is_for_method_handle_intrinsic();

  // decide on access bits: public or not?
  int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
  bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
  if (must_be_static) flags_bits |= JVM_ACC_STATIC;
  assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");

  methodHandle m;
  {
    InlineTableSizes sizes;
    Method* m_oop = Method::allocate(loader_data, 0,
                                     accessFlags_from(flags_bits), &sizes,
                                     ConstMethod::NORMAL,
                                     name,
                                     CHECK_(empty));
    m = methodHandle(THREAD, m_oop);
  }
  m->set_constants(cp());
  m->set_name_index(_imcp_invoke_name);
  m->set_signature_index(_imcp_invoke_signature);
  assert(m->intrinsic_id() == iid, "correctly predicted iid");
#endif //ASSERT

  // Finally, set up its entry points.
  assert(m->can_be_statically_bound(), "");
  m->set_vtable_index(Method::nonvirtual_vtable_index);
  m->link_method(m, CHECK_(empty));

  if (iid == vmIntrinsics::_linkToNative) {
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
  if (log_is_enabled(Debug, methodhandles)) {
    LogTarget(Debug, methodhandles) lt;
    LogStream ls(lt);
    m->print_on(&ls);
  }

  return m;
}

#if INCLUDE_CDS
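// Re-establish entry points for a method handle intrinsic restored from the
// CDS archive; archived methods are stored unlinked.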
void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
  m->link_method(m, CHECK);

  if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
}
#endif

Klass* Method::check_non_bcp_klass(Klass* klass) {
  if (klass != nullptr && klass->class_loader() != nullptr) {
    if (klass->is_objArray_klass()) {
      klass = ObjArrayKlass::cast(klass)->bottom_klass();
    }
    return klass;
  }
  return nullptr;
}

methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
                                         u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
  // Code below does not work for native methods - they should never get rewritten anyway
  assert(!m->is_native(), "cannot rewrite native methods");
  // Allocate new Method*
  AccessFlags flags = m->access_flags();

  ConstMethod* cm = m->constMethod();
  int checked_exceptions_len = cm->checked_exceptions_length();
  int localvariable_len = cm->localvariable_table_length();
  } else {
    return ((mcs != nullptr) ? mcs->invocation_counter()->count() : 0) +
           ((mdo != nullptr) ? mdo->invocation_counter()->count() : 0);
  }
}

int Method::backedge_count() const {
  MethodCounters* mcs = method_counters();
  MethodData* mdo = method_data();
  if (((mcs != nullptr) ? mcs->backedge_counter()->carry() : false) ||
      ((mdo != nullptr) ? mdo->backedge_counter()->carry() : false)) {
    return InvocationCounter::count_limit;
  } else {
    return ((mcs != nullptr) ? mcs->backedge_counter()->count() : 0) +
           ((mdo != nullptr) ? mdo->backedge_counter()->count() : 0);
  }
}

int Method::highest_comp_level() const {
  const MethodCounters* mcs = method_counters();
  nmethod* nm = code();
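  // The installed nmethod (e.g. preloaded code) may be at a higher level than
  // anything recorded in MethodCounters, so take the maximum of the two.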
  int level = (nm != nullptr) ? nm->comp_level() : CompLevel_none;
  if (mcs != nullptr) {
    return MAX2(mcs->highest_comp_level(), level);
  } else {
    return level;
  }
}
2042
2043 int Method::highest_osr_comp_level() const {
2044 const MethodCounters* mcs = method_counters();
2045 if (mcs != nullptr) {
2046 return mcs->highest_osr_comp_level();
2047 } else {
2048 return CompLevel_none;
2049 }
2050 }
2051
2052 void Method::set_highest_comp_level(int level) {
2053 MethodCounters* mcs = method_counters();
2054 if (mcs != nullptr) {
2055 mcs->set_highest_comp_level(level);
2056 }
2057 }
|