43 #include "interpreter/oopMapCache.hpp"
44 #include "logging/log.hpp"
45 #include "logging/logStream.hpp"
46 #include "logging/logTag.hpp"
47 #include "memory/allocation.inline.hpp"
48 #include "memory/metadataFactory.hpp"
49 #include "memory/metaspaceClosure.hpp"
50 #include "memory/oopFactory.hpp"
51 #include "memory/resourceArea.hpp"
52 #include "memory/universe.hpp"
53 #include "nmt/memTracker.hpp"
54 #include "oops/constMethod.hpp"
55 #include "oops/constantPool.hpp"
56 #include "oops/klass.inline.hpp"
57 #include "oops/method.inline.hpp"
58 #include "oops/methodData.hpp"
59 #include "oops/objArrayKlass.hpp"
60 #include "oops/objArrayOop.inline.hpp"
61 #include "oops/oop.inline.hpp"
62 #include "oops/symbol.hpp"
63 #include "prims/jvmtiExport.hpp"
64 #include "prims/methodHandles.hpp"
65 #include "runtime/atomic.hpp"
66 #include "runtime/arguments.hpp"
67 #include "runtime/continuationEntry.hpp"
68 #include "runtime/frame.inline.hpp"
69 #include "runtime/handles.inline.hpp"
70 #include "runtime/init.hpp"
71 #include "runtime/java.hpp"
72 #include "runtime/orderAccess.hpp"
73 #include "runtime/perfData.hpp"
74 #include "runtime/relocator.hpp"
75 #include "runtime/safepointVerifiers.hpp"
76 #include "runtime/sharedRuntime.hpp"
77 #include "runtime/signature.hpp"
78 #include "runtime/threads.hpp"
79 #include "runtime/vm_version.hpp"
80 #include "utilities/align.hpp"
81 #include "utilities/quickSort.hpp"
82 #include "utilities/vmError.hpp"
164 }
165
166 address Method::get_c2i_no_clinit_check_entry() {
167 assert(VM_Version::supports_fast_class_init_checks(), "");
168 assert(adapter() != nullptr, "must have");
169 return adapter()->get_c2i_no_clinit_check_entry();
170 }
171
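// The name_and_sig_as_C_string variants below format a method as
// "ClassName.name(signature)". The resource-area forms allocate a fresh
// buffer; the buffered forms write into a caller-supplied buffer, guarding
// against overflow.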
172 char* Method::name_and_sig_as_C_string() const {
173 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
174 }
175
176 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
177 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
178 }
179
180 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
181 const char* klass_name = klass->external_name();
182 int klass_name_len = (int)strlen(klass_name);
183 int method_name_len = method_name->utf8_length();
184 int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
185 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
186 strcpy(dest, klass_name);
187 dest[klass_name_len] = '.';
188 strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
189 strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
190 dest[len] = 0;
191 return dest;
192 }
193
194 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
195 Symbol* klass_name = klass->name();
196 klass_name->as_klass_external_name(buf, size);
197 int len = (int)strlen(buf);
198
199 if (len < size - 1) {
200 buf[len++] = '.';
201
202 method_name->as_C_string(&(buf[len]), size - len);
203 len = (int)strlen(buf);
204
205 signature->as_C_string(&(buf[len]), size - len);
206 }
207
208 return buf;
209 }
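// Native methods have no bytecodes, so a null bcp is normalized to code_base().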
369 address Method::bcp_from(address bcp) const {
370 if (is_native() && bcp == nullptr) {
371 return code_base();
372 } else {
373 return bcp;
374 }
375 }
376
377 int Method::size(bool is_native) {
378 // If native, then include pointers for native_function and signature_handler
379 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
380 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
381 return align_metadata_size(header_size() + extra_words);
382 }
383
384 Symbol* Method::klass_name() const {
385 return method_holder()->name();
386 }
387
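// Push the metadata fields of this Method so a MetaspaceClosure (e.g. CDS
// dumping) can visit and relocate them.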
388 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
389 log_trace(cds)("Iter(Method): %p", this);
390
391 if (!method_holder()->is_rewritten()) {
392 it->push(&_constMethod, MetaspaceClosure::_writable);
393 } else {
394 it->push(&_constMethod);
395 }
396 it->push(&_method_data);
397 it->push(&_method_counters);
398 NOT_PRODUCT(it->push(&_name);)
399 }
400
401 #if INCLUDE_CDS
402 // Attempt to return method to original state. Clear any pointers
403 // (to objects outside the shared spaces). We won't be able to predict
404 // where they should point in a new JVM. Further initialize some
405 // entries now in order to allow them to be write protected later.
406
407 void Method::remove_unshareable_info() {
408 unlink_method();
409 JFR_ONLY(REMOVE_METHOD_ID(this);)
410 }
411
412 void Method::restore_unshareable_info(TRAPS) {
413 assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
414 assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
415 }
416 #endif
417
418 void Method::set_vtable_index(int index) {
419 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
420 // At runtime initialize_vtable is rerun as part of link_class_impl()
421 // for a shared class loaded by the non-boot loader to obtain the loader
422 // constraints based on the runtime classloaders' context.
423 return; // don't write into the shared class
424 } else {
425 _vtable_index = index;
426 }
427 }
428
429 void Method::set_itable_index(int index) {
430 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
431 // At runtime initialize_itable is rerun as part of link_class_impl()
432 // for a shared class loaded by the non-boot loader to obtain the loader
433 // constraints based on the runtime classloaders' context. The dumptime
434 // itable index should be the same as the runtime index.
561 // Counting based on signed int counters tends to overflow with
562 // longer-running workloads on fast machines. The counters under
563 // consideration here, however, are limited in range by counting
564 // logic. See InvocationCounter::count_limit for an example.
565 // No "overflow precautions" need to be implemented here.
566 st->print_cr (" interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count());
567 st->print_cr (" invocation_counter: " INT32_FORMAT_W(11), invocation_count());
568 st->print_cr (" backedge_counter: " INT32_FORMAT_W(11), backedge_count());
569
570 if (method_data() != nullptr) {
571 st->print_cr (" decompile_count: " UINT32_FORMAT_W(11), method_data()->decompile_count());
572 }
573
574 #ifndef PRODUCT
575 if (CountCompiledCalls) {
576 st->print_cr (" compiled_invocation_count: " INT64_FORMAT_W(11), compiled_invocation_count());
577 }
578 #endif
579 }
580
581 // Build a MethodData* object to hold profiling information collected on this
582 // method when requested.
583 void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
584 // Do not profile the method if metaspace has hit an OOM previously
585 // allocating profiling data. Callers clear pending exception so don't
586 // add one here.
587 if (ClassLoaderDataGraph::has_metaspace_oom()) {
588 return;
589 }
590
591 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
592 MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
593 if (HAS_PENDING_EXCEPTION) {
594 CompileBroker::log_metaspace_failure();
595 ClassLoaderDataGraph::set_metaspace_oom(true);
596 return; // return the exception (which is cleared)
597 }
598
599 if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
600 MetadataFactory::free_metadata(loader_data, method_data);
601 return;
602 }
603
604 if (PrintMethodData && (Verbose || WizardMode)) {
605 ResourceMark rm(THREAD);
606 tty->print("build_profiling_method_data for ");
607 method->print_name(tty);
608 tty->cr();
609 // At the end of the run, the MDO, full of data, will be dumped.
610 }
611 }
612
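// Allocate MethodCounters and try to install them; if another thread won the
// installation race, free the new counters and return the winner's.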
613 MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
614 // Do not profile the method if metaspace has hit an OOM previously
615 if (ClassLoaderDataGraph::has_metaspace_oom()) {
616 return nullptr;
617 }
618
619 methodHandle mh(current, m);
620 MethodCounters* counters;
621 if (current->is_Java_thread()) {
622 JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
623 // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
624 // if needed.
625 counters = MethodCounters::allocate_with_exception(mh, THREAD);
626 if (HAS_PENDING_EXCEPTION) {
627 CLEAR_PENDING_EXCEPTION;
628 }
629 } else {
630 // Use the metaspace allocation that doesn't throw an exception when the
631 // current thread isn't a JavaThread, i.e. the VMThread.
632 counters = MethodCounters::allocate_no_exception(mh);
633 }
634
635 if (counters == nullptr) {
636 CompileBroker::log_metaspace_failure();
637 ClassLoaderDataGraph::set_metaspace_oom(true);
638 return nullptr;
639 }
640
641 if (!mh->init_method_counters(counters)) {
642 MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
643 }
644
645 return mh->method_counters();
646 }
647
648 bool Method::init_method_counters(MethodCounters* counters) {
649 // Try to install a pointer to MethodCounters, return true on success.
650 return Atomic::replace_if_null(&_method_counters, counters);
651 }
652
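// Under -XX:+ProfileExceptionHandlers, record in the MDO that the handler
// at handler_bci has been entered.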
653 void Method::set_exception_handler_entered(int handler_bci) {
654 if (ProfileExceptionHandlers) {
655 MethodData* mdo = method_data();
656 if (mdo != nullptr) {
657 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
658 handler_data.set_exception_handler_entered();
659 }
660 }
661 }
662
663 int Method::extra_stack_words() {
664 // not an inline function, to avoid a header dependency on Interpreter
851 return (is_static() ||
852 method_holder()->major_version() < 51);
853 }
854
855 bool Method::is_static_initializer() const {
856 // For classfiles version 51 or greater, ensure that the clinit method is
857 // static. Non-static methods with the name "<clinit>" are not static
858 // initializers. (older classfiles exempted for backward compatibility)
859 return name() == vmSymbols::class_initializer_name() &&
860 has_valid_initializer_flags();
861 }
862
863 bool Method::is_object_initializer() const {
864 return name() == vmSymbols::object_initializer_name();
865 }
866
867 bool Method::needs_clinit_barrier() const {
868 return is_static() && !method_holder()->is_initialized();
869 }
870
871 bool Method::is_object_wait0() const {
872 return klass_name() == vmSymbols::java_lang_Object()
873 && name() == vmSymbols::wait_name();
874 }
875
876 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
877 int length = method->checked_exceptions_length();
878 if (length == 0) { // common case
879 return objArrayHandle(THREAD, Universe::the_empty_class_array());
880 } else {
881 methodHandle h_this(THREAD, method);
882 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
883 objArrayHandle mirrors (THREAD, m_oop);
884 for (int i = 0; i < length; i++) {
885 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
886 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
887 if (log_is_enabled(Warning, exceptions) &&
888 !k->is_subclass_of(vmClasses::Throwable_klass())) {
889 ResourceMark rm(THREAD);
890 log_warning(exceptions)(
1134 }
1135
1136 #if INCLUDE_CDS
1137 // Called by class data sharing to remove any entry points (which are not shared)
1138 void Method::unlink_method() {
1139 assert(CDSConfig::is_dumping_archive(), "sanity");
1140 _code = nullptr;
1141 _adapter = nullptr;
1142 _i2i_entry = nullptr;
1143 _from_compiled_entry = nullptr;
1144 _from_interpreted_entry = nullptr;
1145
1146 if (is_native()) {
1147 *native_function_addr() = nullptr;
1148 set_signature_handler(nullptr);
1149 }
1150 NOT_PRODUCT(set_compiled_invocation_count(0);)
1151
1152 clear_method_data();
1153 clear_method_counters();
1154 remove_unshareable_flags();
1155 }
1156
1157 void Method::remove_unshareable_flags() {
1158 // clear all the flags that shouldn't be in the archived version
1159 assert(!is_old(), "must be");
1160 assert(!is_obsolete(), "must be");
1161 assert(!is_deleted(), "must be");
1162
1163 set_is_prefixed_native(false);
1164 set_queued_for_compilation(false);
1165 set_is_not_c2_compilable(false);
1166 set_is_not_c1_compilable(false);
1167 set_is_not_c2_osr_compilable(false);
1168 set_on_stack_flag(false);
1169 }
1170 #endif
1171
1172 // Called when the method_holder is getting linked. Set up entry points so the
1173 // method is ready to be called from the interpreter, compiler, and vtables.
1174 void Method::link_method(const methodHandle& h_method, TRAPS) {
1175 if (log_is_enabled(Info, perf, class, link)) {
1176 ClassLoader::perf_ik_link_methods_count()->inc();
1177 }
1178
1179 // If the code cache is full, we may reenter this function for the
1180 // leftover methods that weren't linked.
1181 if (adapter() != nullptr) {
1182 return;
1183 }
1184 assert( _code == nullptr, "nothing compiled yet" );
1185
1186 // Set up the interpreter entry point
1187 assert(this == h_method(), "wrong h_method()" );
1188
1207 // called from the vtable. We need adapters on such methods that get loaded
1208 // later. Ditto for mega-morphic itable calls. If this proves to be a
1209 // problem we'll make these lazily later.
1210 (void) make_adapters(h_method, CHECK);
1211
1212 // ONLY USE the h_method now as make_adapter may have blocked
1213
1214 if (h_method->is_continuation_native_intrinsic()) {
1215 _from_interpreted_entry = nullptr;
1216 _from_compiled_entry = nullptr;
1217 _i2i_entry = nullptr;
1218 if (Continuations::enabled()) {
1219 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1220 AdapterHandlerLibrary::create_native_wrapper(h_method);
1221 if (!h_method->has_compiled_code()) {
1222 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1223 }
1224 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1225 }
1226 }
1227 }
1228
1229 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1230 PerfTraceTime timer(ClassLoader::perf_method_adapters_time());
1231
1232 // Adapters for compiled code are made eagerly here. They are fairly
1233 // small (generally < 100 bytes) and quick to make (and cached and shared)
1234 // so making them eagerly shouldn't be too expensive.
1235 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1236 if (adapter == nullptr ) {
1237 if (!is_init_completed()) {
1238 // Don't throw exceptions during VM initialization because java.lang.* classes
1239 // might not have been initialized, causing problems when constructing the
1240 // Java exception object.
1241 vm_exit_during_initialization("Out of space in CodeCache for adapters");
1242 } else {
1243 THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
1244 }
1245 }
1246
1247 mh->set_adapter_entry(adapter);
1248 mh->_from_compiled_entry = adapter->get_c2i_entry();
1249 return adapter->get_c2i_entry();
1250 }
1929 } else {
1930 return ((mcs != nullptr) ? mcs->invocation_counter()->count() : 0) +
1931 ((mdo != nullptr) ? mdo->invocation_counter()->count() : 0);
1932 }
1933 }
1934
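// Sum of the interpreter and MDO backedge counters, saturated at count_limit
// once either counter's carry (overflow) bit is set.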
1935 int Method::backedge_count() const {
1936 MethodCounters* mcs = method_counters();
1937 MethodData* mdo = method_data();
1938 if (((mcs != nullptr) ? mcs->backedge_counter()->carry() : false) ||
1939 ((mdo != nullptr) ? mdo->backedge_counter()->carry() : false)) {
1940 return InvocationCounter::count_limit;
1941 } else {
1942 return ((mcs != nullptr) ? mcs->backedge_counter()->count() : 0) +
1943 ((mdo != nullptr) ? mdo->backedge_counter()->count() : 0);
1944 }
1945 }
1946
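// Highest tier this method has been compiled at, as recorded in
// MethodCounters; CompLevel_none if no counters have been allocated.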
1947 int Method::highest_comp_level() const {
1948 const MethodCounters* mcs = method_counters();
1949 if (mcs != nullptr) {
1950 return mcs->highest_comp_level();
1951 } else {
1952 return CompLevel_none;
1953 }
1954 }
1955
1956 int Method::highest_osr_comp_level() const {
1957 const MethodCounters* mcs = method_counters();
1958 if (mcs != nullptr) {
1959 return mcs->highest_osr_comp_level();
1960 } else {
1961 return CompLevel_none;
1962 }
1963 }
1964
1965 void Method::set_highest_comp_level(int level) {
1966 MethodCounters* mcs = method_counters();
1967 if (mcs != nullptr) {
1968 mcs->set_highest_comp_level(level);
1969 }
1970 }
|
43 #include "interpreter/oopMapCache.hpp"
44 #include "logging/log.hpp"
45 #include "logging/logStream.hpp"
46 #include "logging/logTag.hpp"
47 #include "memory/allocation.inline.hpp"
48 #include "memory/metadataFactory.hpp"
49 #include "memory/metaspaceClosure.hpp"
50 #include "memory/oopFactory.hpp"
51 #include "memory/resourceArea.hpp"
52 #include "memory/universe.hpp"
53 #include "nmt/memTracker.hpp"
54 #include "oops/constMethod.hpp"
55 #include "oops/constantPool.hpp"
56 #include "oops/klass.inline.hpp"
57 #include "oops/method.inline.hpp"
58 #include "oops/methodData.hpp"
59 #include "oops/objArrayKlass.hpp"
60 #include "oops/objArrayOop.inline.hpp"
61 #include "oops/oop.inline.hpp"
62 #include "oops/symbol.hpp"
63 #include "oops/trainingData.hpp"
64 #include "prims/jvmtiExport.hpp"
65 #include "prims/methodHandles.hpp"
66 #include "runtime/atomic.hpp"
67 #include "runtime/arguments.hpp"
68 #include "runtime/continuationEntry.hpp"
69 #include "runtime/frame.inline.hpp"
70 #include "runtime/handles.inline.hpp"
71 #include "runtime/init.hpp"
72 #include "runtime/java.hpp"
73 #include "runtime/orderAccess.hpp"
74 #include "runtime/perfData.hpp"
75 #include "runtime/relocator.hpp"
76 #include "runtime/safepointVerifiers.hpp"
77 #include "runtime/sharedRuntime.hpp"
78 #include "runtime/signature.hpp"
79 #include "runtime/threads.hpp"
80 #include "runtime/vm_version.hpp"
81 #include "utilities/align.hpp"
82 #include "utilities/quickSort.hpp"
83 #include "utilities/vmError.hpp"
165 }
166
167 address Method::get_c2i_no_clinit_check_entry() {
168 assert(VM_Version::supports_fast_class_init_checks(), "");
169 assert(adapter() != nullptr, "must have");
170 return adapter()->get_c2i_no_clinit_check_entry();
171 }
172
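// The name_and_sig_as_C_string variants below format a method name and
// signature: the resource-area forms use a "ClassName::name(signature)"
// separator, while the buffered forms still join with '.'.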
173 char* Method::name_and_sig_as_C_string() const {
174 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
175 }
176
177 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
178 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
179 }
180
181 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
182 const char* klass_name = klass->external_name();
183 int klass_name_len = (int)strlen(klass_name);
184 int method_name_len = method_name->utf8_length();
185 int len = klass_name_len + 2 + method_name_len + signature->utf8_length();
186 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
187 strcpy(dest, klass_name);
188 dest[klass_name_len + 0] = ':';
189 dest[klass_name_len + 1] = ':';
190 strcpy(&dest[klass_name_len + 2], method_name->as_C_string());
191 strcpy(&dest[klass_name_len + 2 + method_name_len], signature->as_C_string());
192 dest[len] = 0;
193 return dest;
194 }
195
196 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
197 Symbol* klass_name = klass->name();
198 klass_name->as_klass_external_name(buf, size);
199 int len = (int)strlen(buf);
200
201 if (len < size - 1) {
202 buf[len++] = '.';
203
204 method_name->as_C_string(&(buf[len]), size - len);
205 len = (int)strlen(buf);
206
207 signature->as_C_string(&(buf[len]), size - len);
208 }
209
210 return buf;
211 }
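// Native methods have no bytecodes, so a null bcp is normalized to code_base().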
371 address Method::bcp_from(address bcp) const {
372 if (is_native() && bcp == nullptr) {
373 return code_base();
374 } else {
375 return bcp;
376 }
377 }
378
379 int Method::size(bool is_native) {
380 // If native, then include pointers for native_function and signature_handler
381 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
382 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
383 return align_metadata_size(header_size() + extra_words);
384 }
385
386 Symbol* Method::klass_name() const {
387 return method_holder()->name();
388 }
389
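// Push the metadata fields of this Method so a MetaspaceClosure (e.g. CDS
// dumping) can visit and relocate them.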
390 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
391 LogStreamHandle(Trace, cds) lsh;
392 if (lsh.is_enabled()) {
393 lsh.print("Iter(Method): %p ", this);
394 print_external_name(&lsh);
395 lsh.cr();
396 }
397 // Note: the holder is null for MH intrinsic methods.
398 if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
399 it->push(&_constMethod, MetaspaceClosure::_writable);
400 } else {
401 it->push(&_constMethod);
402 }
403 it->push(&_method_data);
404 it->push(&_method_counters);
405 NOT_PRODUCT(it->push(&_name);)
406 }
407
408 #if INCLUDE_CDS
409 // Attempt to return method to original state. Clear any pointers
410 // (to objects outside the shared spaces). We won't be able to predict
411 // where they should point in a new JVM. Further initialize some
412 // entries now in order to allow them to be write protected later.
413
414 void Method::remove_unshareable_info() {
415 unlink_method();
416 if (method_data() != nullptr) {
417 method_data()->remove_unshareable_info();
418 }
419 if (method_counters() != nullptr) {
420 method_counters()->remove_unshareable_info();
421 }
422 JFR_ONLY(REMOVE_METHOD_ID(this);)
423 }
424
425 void Method::restore_unshareable_info(TRAPS) {
426 assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
427 if (method_data() != nullptr) {
428 method_data()->restore_unshareable_info(CHECK);
429 }
430 if (method_counters() != nullptr) {
431 method_counters()->restore_unshareable_info(CHECK);
432 }
433 assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
434 assert(!pending_queue_processed(), "method's pending_queue_processed flag should not be set");
435 }
436 #endif
437
438 void Method::set_vtable_index(int index) {
439 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
440 // At runtime initialize_vtable is rerun as part of link_class_impl()
441 // for a shared class loaded by the non-boot loader to obtain the loader
442 // constraints based on the runtime classloaders' context.
443 return; // don't write into the shared class
444 } else {
445 _vtable_index = index;
446 }
447 }
448
449 void Method::set_itable_index(int index) {
450 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
451 // At runtime initialize_itable is rerun as part of link_class_impl()
452 // for a shared class loaded by the non-boot loader to obtain the loader
453 // constraints based on the runtime classloaders' context. The dumptime
454 // itable index should be the same as the runtime index.
581 // Counting based on signed int counters tends to overflow with
582 // longer-running workloads on fast machines. The counters under
583 // consideration here, however, are limited in range by counting
584 // logic. See InvocationCounter::count_limit for an example.
585 // No "overflow precautions" need to be implemented here.
586 st->print_cr (" interpreter_invocation_count: " INT32_FORMAT_W(11), interpreter_invocation_count());
587 st->print_cr (" invocation_counter: " INT32_FORMAT_W(11), invocation_count());
588 st->print_cr (" backedge_counter: " INT32_FORMAT_W(11), backedge_count());
589
590 if (method_data() != nullptr) {
591 st->print_cr (" decompile_count: " UINT32_FORMAT_W(11), method_data()->decompile_count());
592 }
593
594 #ifndef PRODUCT
595 if (CountCompiledCalls) {
596 st->print_cr (" compiled_invocation_count: " INT64_FORMAT_W(11), compiled_invocation_count());
597 }
598 #endif
599 }
600
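// Training-data support: a MethodTrainingData recorded in MethodCounters can
// supply a previously collected profile for this method (see
// install_training_method_data below).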
601 MethodTrainingData* Method::training_data_or_null() const {
602 MethodCounters* mcs = method_counters();
603 if (mcs == nullptr) {
604 return nullptr;
605 } else {
606 return mcs->method_training_data();
607 }
608 }
609
610 bool Method::init_training_data(MethodTrainingData* tdata) {
611 MethodCounters* mcs = method_counters();
612 if (mcs == nullptr) {
613 return false;
614 } else {
615 return mcs->init_method_training_data(tdata);
616 }
617 }
618
619 bool Method::install_training_method_data(const methodHandle& method) {
620 MethodTrainingData* mtd = MethodTrainingData::find(method);
621 if (mtd != nullptr && mtd->has_holder() && mtd->final_profile() != nullptr &&
622 mtd->holder() == method() && mtd->final_profile()->method() == method()) { // FIXME
623 Atomic::replace_if_null(&method->_method_data, mtd->final_profile());
624 return true;
625 }
626 return false;
627 }
628
629 // Build a MethodData* object to hold profiling information collected on this
630 // method when requested.
631 void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
632 if (install_training_method_data(method)) {
633 return;
634 }
635 // Do not profile the method if metaspace has hit an OOM previously
636 // allocating profiling data. Callers clear pending exception so don't
637 // add one here.
638 if (ClassLoaderDataGraph::has_metaspace_oom()) {
639 return;
640 }
641
642 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
643 MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
644 if (HAS_PENDING_EXCEPTION) {
645 CompileBroker::log_metaspace_failure();
646 ClassLoaderDataGraph::set_metaspace_oom(true);
647 return; // return the exception (which is cleared)
648 }
649
650 if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
651 MetadataFactory::free_metadata(loader_data, method_data);
652 return;
653 }
654
655 /*
656 LogStreamHandle(Info, mdo) lsh;
657 if (lsh.is_enabled()) {
658 ResourceMark rm(THREAD);
659 lsh.print("build_profiling_method_data for ");
660 method->print_name(&lsh);
661 lsh.cr();
662 }
663 */
664 if (ForceProfiling && TrainingData::need_data()) {
665 MethodTrainingData* mtd = MethodTrainingData::make(method, false);
666 guarantee(mtd != nullptr, "");
667 }
668 if (PrintMethodData) {
669 ResourceMark rm(THREAD);
670 tty->print("build_profiling_method_data for ");
671 method->print_name(tty);
672 tty->cr();
673 // At the end of the run, the MDO, full of data, will be dumped.
674 }
675 }
676
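// Allocate MethodCounters and try to install them; if another thread won the
// installation race, free the new counters and return the winner's.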
677 MethodCounters* Method::build_method_counters(Thread* current, Method* m) {
678 // Do not profile the method if metaspace has hit an OOM previously
679 if (ClassLoaderDataGraph::has_metaspace_oom()) {
680 return nullptr;
681 }
682
683 methodHandle mh(current, m);
684 MethodCounters* counters;
685 if (current->is_Java_thread()) {
686 JavaThread* THREAD = JavaThread::cast(current); // For exception macros.
687 // Use the TRAPS version for a JavaThread so it will adjust the GC threshold
688 // if needed.
689 counters = MethodCounters::allocate_with_exception(mh, THREAD);
690 if (HAS_PENDING_EXCEPTION) {
691 CLEAR_PENDING_EXCEPTION;
692 }
693 } else {
694 // Use the metaspace allocation that doesn't throw an exception when the
695 // current thread isn't a JavaThread, i.e. the VMThread.
696 counters = MethodCounters::allocate_no_exception(mh);
697 }
698
699 if (counters == nullptr) {
700 CompileBroker::log_metaspace_failure();
701 ClassLoaderDataGraph::set_metaspace_oom(true);
702 return nullptr;
703 }
704
705 if (!mh->init_method_counters(counters)) {
706 MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
707 }
708
709 if (ForceProfiling && TrainingData::need_data()) {
710 MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
711 guarantee(mtd != nullptr, "");
712 }
713
714 return mh->method_counters();
715 }
716
717 bool Method::init_method_counters(MethodCounters* counters) {
718 // Try to install a pointer to MethodCounters, return true on success.
719 return Atomic::replace_if_null(&_method_counters, counters);
720 }
721
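// Under -XX:+ProfileExceptionHandlers, record in the MDO that the handler
// at handler_bci has been entered.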
722 void Method::set_exception_handler_entered(int handler_bci) {
723 if (ProfileExceptionHandlers) {
724 MethodData* mdo = method_data();
725 if (mdo != nullptr) {
726 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
727 handler_data.set_exception_handler_entered();
728 }
729 }
730 }
731
732 int Method::extra_stack_words() {
733 // not an inline function, to avoid a header dependency on Interpreter
920 return (is_static() ||
921 method_holder()->major_version() < 51);
922 }
923
924 bool Method::is_static_initializer() const {
925 // For classfiles version 51 or greater, ensure that the clinit method is
926 // static. Non-static methods with the name "<clinit>" are not static
927 // initializers. (older classfiles exempted for backward compatibility)
928 return name() == vmSymbols::class_initializer_name() &&
929 has_valid_initializer_flags();
930 }
931
932 bool Method::is_object_initializer() const {
933 return name() == vmSymbols::object_initializer_name();
934 }
935
936 bool Method::needs_clinit_barrier() const {
937 return is_static() && !method_holder()->is_initialized();
938 }
939
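// Whether the currently installed nmethod was compiled with class
// initialization barriers.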
940 bool Method::code_has_clinit_barriers() const {
941 nmethod* nm = code();
942 return (nm != nullptr) && nm->has_clinit_barriers();
943 }
944
945 bool Method::is_object_wait0() const {
946 return klass_name() == vmSymbols::java_lang_Object()
947 && name() == vmSymbols::wait_name();
948 }
949
950 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
951 int length = method->checked_exceptions_length();
952 if (length == 0) { // common case
953 return objArrayHandle(THREAD, Universe::the_empty_class_array());
954 } else {
955 methodHandle h_this(THREAD, method);
956 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
957 objArrayHandle mirrors (THREAD, m_oop);
958 for (int i = 0; i < length; i++) {
959 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
960 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
961 if (log_is_enabled(Warning, exceptions) &&
962 !k->is_subclass_of(vmClasses::Throwable_klass())) {
963 ResourceMark rm(THREAD);
964 log_warning(exceptions)(
1208 }
1209
1210 #if INCLUDE_CDS
1211 // Called by class data sharing to remove any entry points (which are not shared)
1212 void Method::unlink_method() {
1213 assert(CDSConfig::is_dumping_archive(), "sanity");
1214 _code = nullptr;
1215 _adapter = nullptr;
1216 _i2i_entry = nullptr;
1217 _from_compiled_entry = nullptr;
1218 _from_interpreted_entry = nullptr;
1219
1220 if (is_native()) {
1221 *native_function_addr() = nullptr;
1222 set_signature_handler(nullptr);
1223 }
1224 NOT_PRODUCT(set_compiled_invocation_count(0);)
1225
1226 clear_method_data();
1227 clear_method_counters();
1228 clear_is_not_c1_compilable();
1229 clear_is_not_c1_osr_compilable();
1230 clear_is_not_c2_compilable();
1231 clear_is_not_c2_osr_compilable();
1232 clear_queued_for_compilation();
1233 set_pending_queue_processed(false);
1234 remove_unshareable_flags();
1235 }
1236
1237 void Method::remove_unshareable_flags() {
1238 // clear all the flags that shouldn't be in the archived version
1239 assert(!is_old(), "must be");
1240 assert(!is_obsolete(), "must be");
1241 assert(!is_deleted(), "must be");
1242
1243 set_is_prefixed_native(false);
1244 set_queued_for_compilation(false);
1245 set_pending_queue_processed(false);
1246 set_is_not_c2_compilable(false);
1247 set_is_not_c1_compilable(false);
1248 set_is_not_c2_osr_compilable(false);
1249 set_on_stack_flag(false);
1250 set_has_upcall_on_method_entry(false);
1251 set_has_upcall_on_method_exit(false);
1252 }
1253 #endif
1254
1255 // Called when the method_holder is getting linked. Set up entry points so the
1256 // method is ready to be called from the interpreter, compiler, and vtables.
1257 void Method::link_method(const methodHandle& h_method, TRAPS) {
1258 if (log_is_enabled(Info, perf, class, link)) {
1259 ClassLoader::perf_ik_link_methods_count()->inc();
1260 }
1261
1262 // If the code cache is full, we may reenter this function for the
1263 // leftover methods that weren't linked.
1264 if (adapter() != nullptr) {
1265 return;
1266 }
1267 assert( _code == nullptr, "nothing compiled yet" );
1268
1269 // Set up the interpreter entry point
1270 assert(this == h_method(), "wrong h_method()" );
1271
1290 // called from the vtable. We need adapters on such methods that get loaded
1291 // later. Ditto for mega-morphic itable calls. If this proves to be a
1292 // problem we'll make these lazily later.
1293 (void) make_adapters(h_method, CHECK);
1294
1295 // ONLY USE the h_method now as make_adapter may have blocked
1296
1297 if (h_method->is_continuation_native_intrinsic()) {
1298 _from_interpreted_entry = nullptr;
1299 _from_compiled_entry = nullptr;
1300 _i2i_entry = nullptr;
1301 if (Continuations::enabled()) {
1302 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1303 AdapterHandlerLibrary::create_native_wrapper(h_method);
1304 if (!h_method->has_compiled_code()) {
1305 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1306 }
1307 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1308 }
1309 }
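// If pre-generated code was attached to this method, install it now that the
// entry points are set up; its cached-code entry must match _scc_entry.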
1310 if (_preload_code != nullptr) {
1311 MutexLocker ml(NMethodState_lock, Mutex::_no_safepoint_check_flag);
1312 set_code(h_method, _preload_code);
1313 assert(((nmethod*)_preload_code)->scc_entry() == _scc_entry, "sanity");
1314 }
1315 }
1316
1317 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1318 PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());
1319
1320 // Adapters for compiled code are made eagerly here. They are fairly
1321 // small (generally < 100 bytes) and quick to make (and cached and shared)
1322 // so making them eagerly shouldn't be too expensive.
1323 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1324 if (adapter == nullptr ) {
1325 if (!is_init_completed()) {
1326 // Don't throw exceptions during VM initialization because java.lang.* classes
1327 // might not have been initialized, causing problems when constructing the
1328 // Java exception object.
1329 vm_exit_during_initialization("Out of space in CodeCache for adapters");
1330 } else {
1331 THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
1332 }
1333 }
1334
1335 mh->set_adapter_entry(adapter);
1336 mh->_from_compiled_entry = adapter->get_c2i_entry();
1337 return adapter->get_c2i_entry();
1338 }
2017 } else {
2018 return ((mcs != nullptr) ? mcs->invocation_counter()->count() : 0) +
2019 ((mdo != nullptr) ? mdo->invocation_counter()->count() : 0);
2020 }
2021 }
2022
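// Sum of the interpreter and MDO backedge counters, saturated at count_limit
// once either counter's carry (overflow) bit is set.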
2023 int Method::backedge_count() const {
2024 MethodCounters* mcs = method_counters();
2025 MethodData* mdo = method_data();
2026 if (((mcs != nullptr) ? mcs->backedge_counter()->carry() : false) ||
2027 ((mdo != nullptr) ? mdo->backedge_counter()->carry() : false)) {
2028 return InvocationCounter::count_limit;
2029 } else {
2030 return ((mcs != nullptr) ? mcs->backedge_counter()->count() : 0) +
2031 ((mdo != nullptr) ? mdo->backedge_counter()->count() : 0);
2032 }
2033 }
2034
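// Highest tier this method has been compiled at: the maximum of the level
// recorded in MethodCounters and the level of the currently installed nmethod.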
2035 int Method::highest_comp_level() const {
2036 const MethodCounters* mcs = method_counters();
2037 nmethod* nm = code();
2038 int level = (nm != nullptr) ? nm->comp_level() : CompLevel_none;
2039 if (mcs != nullptr) {
2040 return MAX2(mcs->highest_comp_level(), level);
2041 } else {
2042 return CompLevel_none;
2043 }
2044 }
2045
2046 int Method::highest_osr_comp_level() const {
2047 const MethodCounters* mcs = method_counters();
2048 if (mcs != nullptr) {
2049 return mcs->highest_osr_comp_level();
2050 } else {
2051 return CompLevel_none;
2052 }
2053 }
2054
2055 void Method::set_highest_comp_level(int level) {
2056 MethodCounters* mcs = method_counters();
2057 if (mcs != nullptr) {
2058 mcs->set_highest_comp_level(level);
2059 }
2060 }
|