44 #include "logging/log.hpp"
45 #include "logging/logStream.hpp"
46 #include "logging/logTag.hpp"
47 #include "memory/allocation.inline.hpp"
48 #include "memory/metadataFactory.hpp"
49 #include "memory/metaspaceClosure.hpp"
50 #include "memory/oopFactory.hpp"
51 #include "memory/resourceArea.hpp"
52 #include "memory/universe.hpp"
53 #include "nmt/memTracker.hpp"
54 #include "oops/constMethod.hpp"
55 #include "oops/constantPool.hpp"
56 #include "oops/jmethodIDTable.hpp"
57 #include "oops/klass.inline.hpp"
58 #include "oops/method.inline.hpp"
59 #include "oops/methodData.hpp"
60 #include "oops/objArrayKlass.hpp"
61 #include "oops/objArrayOop.inline.hpp"
62 #include "oops/oop.inline.hpp"
63 #include "oops/symbol.hpp"
64 #include "oops/trainingData.hpp"
65 #include "prims/jvmtiExport.hpp"
66 #include "prims/methodHandles.hpp"
67 #include "runtime/atomic.hpp"
68 #include "runtime/arguments.hpp"
69 #include "runtime/continuationEntry.hpp"
70 #include "runtime/frame.inline.hpp"
71 #include "runtime/handles.inline.hpp"
72 #include "runtime/init.hpp"
73 #include "runtime/java.hpp"
74 #include "runtime/orderAccess.hpp"
75 #include "runtime/perfData.hpp"
76 #include "runtime/relocator.hpp"
77 #include "runtime/safepointVerifiers.hpp"
78 #include "runtime/sharedRuntime.hpp"
79 #include "runtime/signature.hpp"
80 #include "runtime/threads.hpp"
81 #include "runtime/vm_version.hpp"
82 #include "utilities/align.hpp"
83 #include "utilities/quickSort.hpp"
105 }
106
107 Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
108 NoSafepointVerifier no_safepoint;
109 set_constMethod(xconst);
110 set_access_flags(access_flags);
111 set_intrinsic_id(vmIntrinsics::_none);
112 clear_method_data();
113 clear_method_counters();
114 set_vtable_index(Method::garbage_vtable_index);
115
116 // Fix and bury in Method*
117 set_interpreter_entry(nullptr); // sets i2i entry and from_int
118 set_adapter_entry(nullptr);
119 Method::clear_code(); // from_c/from_i get set to c2i/i2i
120
121 if (access_flags.is_native()) {
122 clear_native_function();
123 set_signature_handler(nullptr);
124 }
125
126 NOT_PRODUCT(set_compiled_invocation_count(0);)
127 // Name is very useful for debugging.
128 NOT_PRODUCT(_name = name;)
129 }
130
131 // Release Method*. The nmethod will be gone when we get here because
132 // we've walked the code cache.
133 void Method::deallocate_contents(ClassLoaderData* loader_data) {
134 MetadataFactory::free_metadata(loader_data, constMethod());
135 set_constMethod(nullptr);
136 MetadataFactory::free_metadata(loader_data, method_data());
137 clear_method_data();
138 MetadataFactory::free_metadata(loader_data, method_counters());
139 clear_method_counters();
140 set_adapter_entry(nullptr);
141 // The nmethod will be gone when we get here.
142 if (code() != nullptr) _code = nullptr;
143 }
144
145 void Method::release_C_heap_structures() {
146 if (method_data()) {
147 method_data()->release_C_heap_structures();
148
149 // Destroy MethodData embedded lock
150 method_data()->~MethodData();
151 }
152 }
153
154 address Method::get_i2c_entry() {
155 assert(adapter() != nullptr, "must have");
156 return adapter()->get_i2c_entry();
157 }
158
159 address Method::get_c2i_entry() {
160 assert(adapter() != nullptr, "must have");
161 return adapter()->get_c2i_entry();
162 }
163
164 address Method::get_c2i_unverified_entry() {
165 assert(adapter() != nullptr, "must have");
166 return adapter()->get_c2i_unverified_entry();
167 }
168
169 address Method::get_c2i_no_clinit_check_entry() {
170 assert(VM_Version::supports_fast_class_init_checks(), "");
171 assert(adapter() != nullptr, "must have");
172 return adapter()->get_c2i_no_clinit_check_entry();
173 }
174
175 char* Method::name_and_sig_as_C_string() const {
176 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
177 }
178
179 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
180 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
181 }
182
183 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
184 const char* klass_name = klass->external_name();
185 int klass_name_len = (int)strlen(klass_name);
186 int method_name_len = method_name->utf8_length();
187 int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
188 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
374 return code_base();
375 } else {
376 return bcp;
377 }
378 }
379
380 int Method::size(bool is_native) {
381 // If native, then include pointers for native_function and signature_handler
382 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
383 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
384 return align_metadata_size(header_size() + extra_words);
385 }
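// Illustrative arithmetic for Method::size() above: on a 64-bit VM,
// sizeof(address*) is 8, so a native method adds 2 * 8 = 16 extra bytes,
// which align_up()/BytesPerWord turns into 2 extra words beyond header_size().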
386
387 Symbol* Method::klass_name() const {
388 return method_holder()->name();
389 }
390
391 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
392 log_trace(aot)("Iter(Method): %p", this);
393
394 if (!method_holder()->is_rewritten()) {
395 it->push(&_constMethod, MetaspaceClosure::_writable);
396 } else {
397 it->push(&_constMethod);
398 }
399 it->push(&_adapter);
400 it->push(&_method_data);
401 it->push(&_method_counters);
402 NOT_PRODUCT(it->push(&_name);)
403 }
404
405 #if INCLUDE_CDS
406 // Attempt to return method to original state. Clear any pointers
407 // (to objects outside the shared spaces). We won't be able to predict
408 // where they should point in a new JVM. Further initialize some
409 // entries now in order to allow them to be write-protected later.
410
411 void Method::remove_unshareable_info() {
412 unlink_method();
413 if (method_data() != nullptr) {
414 method_data()->remove_unshareable_info();
417 method_counters()->remove_unshareable_info();
418 }
419 if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
420 _adapter->remove_unshareable_info();
421 _adapter = nullptr;
422 }
423 JFR_ONLY(REMOVE_METHOD_ID(this);)
424 }
425
426 void Method::restore_unshareable_info(TRAPS) {
427 assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
428 if (method_data() != nullptr) {
429 method_data()->restore_unshareable_info(CHECK);
430 }
431 if (method_counters() != nullptr) {
432 method_counters()->restore_unshareable_info(CHECK);
433 }
434 if (_adapter != nullptr) {
435 assert(_adapter->is_linked(), "must be");
436 _from_compiled_entry = _adapter->get_c2i_entry();
437 }
438 assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
439 }
440 #endif
441
442 void Method::set_vtable_index(int index) {
443 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
444 // At runtime initialize_vtable is rerun as part of link_class_impl()
445 // for a shared class loaded by the non-boot loader to obtain the loader
446 // constraints based on the runtime classloaders' context.
447 return; // don't write into the shared class
448 } else {
449 _vtable_index = index;
450 }
451 }
452
453 void Method::set_itable_index(int index) {
454 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
455 // At runtime initialize_itable is rerun as part of link_class_impl()
456 // for a shared class loaded by the non-boot loader to obtain the loader
706 bool Method::init_method_counters(MethodCounters* counters) {
707 // Try to install a pointer to MethodCounters, return true on success.
708 return Atomic::replace_if_null(&_method_counters, counters);
709 }
710
711 void Method::set_exception_handler_entered(int handler_bci) {
712 if (ProfileExceptionHandlers) {
713 MethodData* mdo = method_data();
714 if (mdo != nullptr) {
715 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
716 handler_data.set_exception_handler_entered();
717 }
718 }
719 }
720
721 int Method::extra_stack_words() {
722 // not an inline function, to avoid a header dependency on Interpreter
723 return extra_stack_entries() * Interpreter::stackElementSize;
724 }
725
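// Roughly speaking, compute_has_loops_flag() below reports a loop when it sees
// a branch whose target precedes the branch itself (a backward branch), e.g.
// the conditional jump that javac emits at the bottom of a while loop.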
726 bool Method::compute_has_loops_flag() {
727 BytecodeStream bcs(methodHandle(Thread::current(), this));
728 Bytecodes::Code bc;
729
730 while ((bc = bcs.next()) >= 0) {
731 switch (bc) {
732 case Bytecodes::_ifeq:
733 case Bytecodes::_ifnull:
734 case Bytecodes::_iflt:
735 case Bytecodes::_ifle:
736 case Bytecodes::_ifne:
737 case Bytecodes::_ifnonnull:
738 case Bytecodes::_ifgt:
739 case Bytecodes::_ifge:
740 case Bytecodes::_if_icmpeq:
741 case Bytecodes::_if_icmpne:
742 case Bytecodes::_if_icmplt:
743 case Bytecodes::_if_icmpgt:
744 case Bytecodes::_if_icmple:
745 case Bytecodes::_if_icmpge:
854
855 bool Method::is_accessor() const {
856 return is_getter() || is_setter();
857 }
858
859 bool Method::is_getter() const {
860 if (code_size() != 5) return false;
861 if (size_of_parameters() != 1) return false;
862 if (java_code_at(0) != Bytecodes::_aload_0) return false;
863 if (java_code_at(1) != Bytecodes::_getfield) return false;
864 switch (java_code_at(4)) {
865 case Bytecodes::_ireturn:
866 case Bytecodes::_lreturn:
867 case Bytecodes::_freturn:
868 case Bytecodes::_dreturn:
869 case Bytecodes::_areturn:
870 break;
871 default:
872 return false;
873 }
874 return true;
875 }
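// Illustrative example: a trivial getter such as
//   int getX() { return x; }
// compiles to exactly the 5-byte pattern checked above:
//   aload_0; getfield #x; ireturn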
876
877 bool Method::is_setter() const {
878 if (code_size() != 6) return false;
879 if (java_code_at(0) != Bytecodes::_aload_0) return false;
880 switch (java_code_at(1)) {
881 case Bytecodes::_iload_1:
882 case Bytecodes::_aload_1:
883 case Bytecodes::_fload_1:
884 if (size_of_parameters() != 2) return false;
885 break;
886 case Bytecodes::_dload_1:
887 case Bytecodes::_lload_1:
888 if (size_of_parameters() != 3) return false;
889 break;
890 default:
891 return false;
892 }
893 if (java_code_at(2) != Bytecodes::_putfield) return false;
894 if (java_code_at(5) != Bytecodes::_return) return false;
895 return true;
896 }
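// Illustrative example: a trivial setter such as
//   void setX(int v) { x = v; }
// compiles to the 6-byte pattern checked above:
//   aload_0; iload_1; putfield #x; return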
897
898 bool Method::is_constant_getter() const {
899 int last_index = code_size() - 1;
900   // Check that the first bytecode (1-3 bytes long) is a constant push
901 // and the last bytecode is a return.
902 return (2 <= code_size() && code_size() <= 4 &&
903 Bytecodes::is_const(java_code_at(0)) &&
904 Bytecodes::length_for(java_code_at(0)) == last_index &&
905 Bytecodes::is_return(java_code_at(last_index)));
906 }
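// Illustrative examples: "int zero() { return 0; }" compiles to
//   iconst_0; ireturn            (code_size == 2)
// and "int big() { return 1000; }" compiles to
//   sipush 1000; ireturn         (code_size == 4)
// both of which are recognized here.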
907
908 bool Method::has_valid_initializer_flags() const {
909 return (is_static() ||
910 method_holder()->major_version() < 51);
911 }
912
913 bool Method::is_static_initializer() const {
914 // For classfiles version 51 or greater, ensure that the clinit method is
915 // static. Non-static methods with the name "<clinit>" are not static
916 // initializers. (older classfiles exempted for backward compatibility)
917 return name() == vmSymbols::class_initializer_name() &&
918 has_valid_initializer_flags();
919 }
920
921 bool Method::is_object_initializer() const {
922 return name() == vmSymbols::object_initializer_name();
923 }
924
925 bool Method::needs_clinit_barrier() const {
926 return is_static() && !method_holder()->is_initialized();
927 }
928
929 bool Method::is_object_wait0() const {
930 return klass_name() == vmSymbols::java_lang_Object()
931 && name() == vmSymbols::wait_name();
932 }
933
934 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
935 int length = method->checked_exceptions_length();
936 if (length == 0) { // common case
937 return objArrayHandle(THREAD, Universe::the_empty_class_array());
938 } else {
939 methodHandle h_this(THREAD, method);
940 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
941 objArrayHandle mirrors (THREAD, m_oop);
942 for (int i = 0; i < length; i++) {
965 // Not necessarily sorted and not necessarily one-to-one.
966 CompressedLineNumberReadStream stream(compressed_linenumber_table());
967 while (stream.read_pair()) {
968 if (stream.bci() == bci) {
969 // perfect match
970 return stream.line();
971 } else {
972 // update best_bci/line
973 if (stream.bci() < bci && stream.bci() >= best_bci) {
974 best_bci = stream.bci();
975 best_line = stream.line();
976 }
977 }
978 }
979 }
980 return best_line;
981 }
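// Worked example for the lookup above: with line table pairs
// (bci 0 -> line 10), (bci 8 -> line 12), (bci 20 -> line 15),
// a query for bci 13 has no exact match, so the closest preceding entry
// (bci 8) wins and line 12 is returned.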
982
983
984 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
985   if (constants()->tag_at(klass_index).is_unresolved_klass()) {
986 Thread *thread = Thread::current();
987 Symbol* klass_name = constants()->klass_name_at(klass_index);
988 Handle loader(thread, method_holder()->class_loader());
989 return SystemDictionary::find_instance_klass(thread, klass_name, loader) != nullptr;
990 } else {
991 return true;
992 }
993 }
994
995
996 bool Method::is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved) const {
997 int klass_index = constants()->klass_ref_index_at(refinfo_index, bc);
998 if (must_be_resolved) {
999 // Make sure klass is resolved in constantpool.
1000 if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
1001 }
1002 return is_klass_loaded_by_klass_index(klass_index);
1003 }
1004
1005
1006 void Method::set_native_function(address function, bool post_event_flag) {
1007 assert(function != nullptr, "use clear_native_function to unregister natives");
1008 assert(!is_special_native_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
1009 address* native_function = native_function_addr();
1010
1011 // We can see racers trying to place the same native function into place. Once
1012 // is plenty.
1013 address current = *native_function;
1014 if (current == function) return;
1015 if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
1016 function != nullptr) {
1017 // native_method_throw_unsatisfied_link_error_entry() should only
1018 // be passed when post_event_flag is false.
1019 assert(function !=
1020 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1149 void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) {
1150 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
1151 if (comp_level == CompLevel_all) {
1152 set_is_not_c1_osr_compilable();
1153 set_is_not_c2_osr_compilable();
1154 } else {
1155 if (is_c1_compile(comp_level))
1156 set_is_not_c1_osr_compilable();
1157 if (is_c2_compile(comp_level))
1158 set_is_not_c2_osr_compilable();
1159 }
1160 assert(!CompilationPolicy::can_be_osr_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
1161 }
1162
1163 // Revert to using the interpreter and clear out the nmethod
1164 void Method::clear_code() {
1165 // this may be null if c2i adapters have not been made yet
1166 // Only should happen at allocate time.
1167 if (adapter() == nullptr) {
1168 _from_compiled_entry = nullptr;
1169 } else {
1170 _from_compiled_entry = adapter()->get_c2i_entry();
1171 }
1172 OrderAccess::storestore();
1173 _from_interpreted_entry = _i2i_entry;
1174 OrderAccess::storestore();
1175 _code = nullptr;
1176 }
1177
1178 void Method::unlink_code(nmethod *compare) {
1179 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
1180 // We need to check if either the _code or _from_compiled_code_entry_point
1181 // refer to this nmethod because there is a race in setting these two fields
1182 // in Method* as seen in bugid 4947125.
1183 if (code() == compare ||
1184 from_compiled_entry() == compare->verified_entry_point()) {
1185 clear_code();
1186 }
1187 }
1188
1189 void Method::unlink_code() {
1190 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
1191 clear_code();
1192 }
1193
1194 #if INCLUDE_CDS
1195 // Called by class data sharing to remove any entry points (which are not shared)
1196 void Method::unlink_method() {
1197 assert(CDSConfig::is_dumping_archive(), "sanity");
1198 _code = nullptr;
1199 if (!CDSConfig::is_dumping_adapters() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) {
1200 _adapter = nullptr;
1201 }
1202 _i2i_entry = nullptr;
1203 _from_compiled_entry = nullptr;
1204 _from_interpreted_entry = nullptr;
1205
1206 if (is_native()) {
1207 *native_function_addr() = nullptr;
1208 set_signature_handler(nullptr);
1209 }
1210 NOT_PRODUCT(set_compiled_invocation_count(0);)
1211
1212 clear_method_data();
1213 clear_method_counters();
1214 clear_is_not_c1_compilable();
1215 clear_is_not_c1_osr_compilable();
1216 clear_is_not_c2_compilable();
1217 clear_is_not_c2_osr_compilable();
1218 clear_queued_for_compilation();
1219
1220 remove_unshareable_flags();
1221 }
1222
1223 void Method::remove_unshareable_flags() {
1224 // clear all the flags that shouldn't be in the archived version
1225 assert(!is_old(), "must be");
1226 assert(!is_obsolete(), "must be");
1227 assert(!is_deleted(), "must be");
1228
1229 set_is_prefixed_native(false);
1230 set_queued_for_compilation(false);
1231 set_is_not_c2_compilable(false);
1232 set_is_not_c1_compilable(false);
1233 set_is_not_c2_osr_compilable(false);
1234 set_on_stack_flag(false);
1235 }
1236 #endif
1237
1238 // Called when the method_holder is getting linked. Set up entrypoints so the method
1239 // is ready to be called from interpreter, compiler, and vtables.
1240 void Method::link_method(const methodHandle& h_method, TRAPS) {
1241 if (log_is_enabled(Info, perf, class, link)) {
1242 ClassLoader::perf_ik_link_methods_count()->inc();
1243 }
1244
1245 // If the code cache is full, we may reenter this function for the
1246 // leftover methods that weren't linked.
1247 if (adapter() != nullptr) {
1248 if (adapter()->is_shared()) {
1249 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1250 } else {
1251 return;
1252 }
1253 }
1254 assert( _code == nullptr, "nothing compiled yet" );
1255
1256 // Setup interpreter entrypoint
1257 assert(this == h_method(), "wrong h_method()" );
1258
1259 assert(adapter() == nullptr || adapter()->is_linked(), "init'd to null or restored from cache");
1260 address entry = Interpreter::entry_for_method(h_method);
1261 assert(entry != nullptr, "interpreter entry must be non-null");
1262 // Sets both _i2i_entry and _from_interpreted_entry
1263 set_interpreter_entry(entry);
1264
1265 // Don't overwrite already registered native entries.
1266 if (is_native() && !has_native_function()) {
1267 set_native_function(
1268 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1269 !native_bind_event_is_interesting);
1270 }
1271
1272 // Setup compiler entrypoint. This is made eagerly, so we do not need
1273 // special handling of vtables. An alternative is to make adapters more
1274 // lazily by calling make_adapter() from from_compiled_entry() for the
1275 // normal calls. For vtable calls life gets more complicated. When a
1276 // call-site goes mega-morphic we need adapters in all methods which can be
1277 // called from the vtable. We need adapters on such methods that get loaded
1278 // later. Ditto for mega-morphic itable calls. If this proves to be a
1279 // problem we'll make these lazily later.
1280 if (_adapter == nullptr) {
1281 (void) make_adapters(h_method, CHECK);
1282 assert(adapter()->is_linked(), "Adapter must have been linked");
1283 }
1284
1285 // ONLY USE the h_method now as make_adapter may have blocked
1286
1287 if (h_method->is_continuation_native_intrinsic()) {
1288 _from_interpreted_entry = nullptr;
1289 _from_compiled_entry = nullptr;
1290 _i2i_entry = nullptr;
1302 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1303 PerfTraceTime timer(ClassLoader::perf_method_adapters_time());
1304
1305 // Adapters for compiled code are made eagerly here. They are fairly
1306 // small (generally < 100 bytes) and quick to make (and cached and shared)
1307 // so making them eagerly shouldn't be too expensive.
1308 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1309 if (adapter == nullptr ) {
1310 if (!is_init_completed()) {
1311 // Don't throw exceptions during VM initialization because java.lang.* classes
1312 // might not have been initialized, causing problems when constructing the
1313 // Java exception object.
1314 vm_exit_during_initialization("Out of space in CodeCache for adapters");
1315 } else {
1316 THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
1317 }
1318 }
1319
1320 mh->set_adapter_entry(adapter);
1321 mh->_from_compiled_entry = adapter->get_c2i_entry();
1322 return adapter->get_c2i_entry();
1323 }
1324
1325 // The verified_code_entry() must be called when an invoke is resolved
1326 // on this method.
1327
1328 // It returns the compiled code entry point, after asserting not null.
1329 // This function is called after potential safepoints so that the nmethod
1330 // or adapter that it points to is still live and valid.
1331 // This function must not hit a safepoint!
1332 address Method::verified_code_entry() {
1333 DEBUG_ONLY(NoSafepointVerifier nsv;)
1334 assert(_from_compiled_entry != nullptr, "must be set");
1335 return _from_compiled_entry;
1336 }
1337
1338 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
1339 // (could be racing a deopt).
1340 // Not inline to avoid circular ref.
1341 bool Method::check_code() const {
1342 // cached in a register or local. There's a race on the value of the field.
1343 nmethod *code = Atomic::load_acquire(&_code);
1344 return code == nullptr || (code->method() == nullptr) || (code->method() == (Method*)this && !code->is_osr_method());
1345 }
1346
1347 // Install compiled code. Instantly it can execute.
1348 void Method::set_code(const methodHandle& mh, nmethod *code) {
1349 assert_lock_strong(NMethodState_lock);
1350 assert( code, "use clear_code to remove code" );
1351 assert( mh->check_code(), "" );
1352
1353 guarantee(mh->adapter() != nullptr, "Adapter blob must already exist!");
1354
1355 // These writes must happen in this order, because the interpreter will
1356 // directly jump to from_interpreted_entry which jumps to an i2c adapter
1357 // which jumps to _from_compiled_entry.
1358 mh->_code = code; // Assign before allowing compiled code to exec
1359
1360 int comp_level = code->comp_level();
1361 // In theory there could be a race here. In practice it is unlikely
1362 // and not worth worrying about.
1363 if (comp_level > mh->highest_comp_level()) {
1364 mh->set_highest_comp_level(comp_level);
1365 }
1366
1367 OrderAccess::storestore();
1368 mh->_from_compiled_entry = code->verified_entry_point();
1369 OrderAccess::storestore();
1370
1371 if (mh->is_continuation_native_intrinsic()) {
1372 assert(mh->_from_interpreted_entry == nullptr, "initialized incorrectly"); // see link_method
1373
1374 if (mh->is_continuation_enter_intrinsic()) {
1375 // This is the entry used when we're in interpreter-only mode; see InterpreterMacroAssembler::jump_from_interpreted
1376 mh->_i2i_entry = ContinuationEntry::interpreted_entry();
1377 } else if (mh->is_continuation_yield_intrinsic()) {
1378 mh->_i2i_entry = mh->get_i2c_entry();
1379 } else {
1380 guarantee(false, "Unknown Continuation native intrinsic");
1381 }
1382 // This must come last, as it is what's tested in LinkResolver::resolve_static_call
1383 Atomic::release_store(&mh->_from_interpreted_entry , mh->get_i2c_entry());
1384 } else if (!mh->is_method_handle_intrinsic()) {
1385 // Instantly compiled code can execute.
1386 mh->_from_interpreted_entry = mh->get_i2c_entry();
1387 }
1388 }
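// Illustrative call path once an nmethod is installed by set_code() above:
// an interpreted caller jumps through _from_interpreted_entry to an i2c
// adapter, which in turn jumps to _from_compiled_entry, i.e. the nmethod's
// verified entry point; hence the store ordering enforced above.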
1541 assert(m->can_be_statically_bound(), "");
1542 m->set_vtable_index(Method::nonvirtual_vtable_index);
1543 m->link_method(m, CHECK_(empty));
1544
1545 if (iid == vmIntrinsics::_linkToNative) {
1546 m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1547 }
1548 if (log_is_enabled(Debug, methodhandles)) {
1549 LogTarget(Debug, methodhandles) lt;
1550 LogStream ls(lt);
1551 m->print_on(&ls);
1552 }
1553
1554 return m;
1555 }
1556
1557 #if INCLUDE_CDS
1558 void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
1559 if (m->adapter() != nullptr) {
1560 m->set_from_compiled_entry(m->adapter()->get_c2i_entry());
1561 }
1562 m->link_method(m, CHECK);
1563
1564 if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
1565 m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1566 }
1567 }
1568 #endif
1569
1570 Klass* Method::check_non_bcp_klass(Klass* klass) {
1571 if (klass != nullptr && klass->class_loader() != nullptr) {
1572 if (klass->is_objArray_klass())
1573 klass = ObjArrayKlass::cast(klass)->bottom_klass();
1574 return klass;
1575 }
1576 return nullptr;
1577 }
1578
1579
1580 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
2158 }
2159
2160 // Check that this pointer is valid by checking that the vtbl pointer matches
2161 bool Method::is_valid_method(const Method* m) {
2162 if (m == nullptr) {
2163 return false;
2164 } else if ((intptr_t(m) & (wordSize-1)) != 0) {
2165 // Quick sanity check on pointer.
2166 return false;
2167 } else if (!os::is_readable_range(m, m + 1)) {
2168 return false;
2169 } else if (m->is_shared()) {
2170 return CppVtables::is_valid_shared_method(m);
2171 } else if (Metaspace::contains_non_shared(m)) {
2172 return has_method_vptr((const void*)m);
2173 } else {
2174 return false;
2175 }
2176 }
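// Example: the alignment test above cheaply rejects obviously bogus values,
// such as a misaligned pointer with its low bits set, before the more
// expensive readability, shared-space, and vptr checks run.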
2177
2178 // Printing
2179
2180 #ifndef PRODUCT
2181
2182 void Method::print_on(outputStream* st) const {
2183 ResourceMark rm;
2184 assert(is_method(), "must be method");
2185 st->print_cr("%s", internal_name());
2186 st->print_cr(" - this oop: " PTR_FORMAT, p2i(this));
2187 st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr();
2188 st->print (" - constants: " PTR_FORMAT " ", p2i(constants()));
2189 constants()->print_value_on(st); st->cr();
2190 st->print (" - access: 0x%x ", access_flags().as_method_flags()); access_flags().print_on(st); st->cr();
2191 st->print (" - flags: 0x%x ", _flags.as_int()); _flags.print_on(st); st->cr();
2192 st->print (" - name: "); name()->print_value_on(st); st->cr();
2193 st->print (" - signature: "); signature()->print_value_on(st); st->cr();
2194 st->print_cr(" - max stack: %d", max_stack());
2195 st->print_cr(" - max locals: %d", max_locals());
2196 st->print_cr(" - size of params: %d", size_of_parameters());
2197 st->print_cr(" - method size: %d", method_size());
2198 if (intrinsic_id() != vmIntrinsics::_none)
2199 st->print_cr(" - intrinsic id: %d %s", vmIntrinsics::as_int(intrinsic_id()), vmIntrinsics::name_at(intrinsic_id()));
2200 if (highest_comp_level() != CompLevel_none)
2201 st->print_cr(" - highest level: %d", highest_comp_level());
2202 st->print_cr(" - vtable index: %d", _vtable_index);
2203 st->print_cr(" - i2i entry: " PTR_FORMAT, p2i(interpreter_entry()));
2204 st->print( " - adapters: ");
2205 AdapterHandlerEntry* a = ((Method*)this)->adapter();
2206 if (a == nullptr)
2207 st->print_cr(PTR_FORMAT, p2i(a));
2208 else
2209 a->print_adapter_on(st);
2210 st->print_cr(" - compiled entry " PTR_FORMAT, p2i(from_compiled_entry()));
2211 st->print_cr(" - code size: %d", code_size());
2212 if (code_size() != 0) {
2213 st->print_cr(" - code start: " PTR_FORMAT, p2i(code_base()));
2214 st->print_cr(" - code end (excl): " PTR_FORMAT, p2i(code_base() + code_size()));
2215 }
2216 if (method_data() != nullptr) {
2217 st->print_cr(" - method data: " PTR_FORMAT, p2i(method_data()));
2218 }
2219 st->print_cr(" - checked ex length: %d", checked_exceptions_length());
2220 if (checked_exceptions_length() > 0) {
2221 CheckedExceptionElement* table = checked_exceptions_start();
2222 st->print_cr(" - checked ex start: " PTR_FORMAT, p2i(table));
2223 if (Verbose) {
2224 for (int i = 0; i < checked_exceptions_length(); i++) {
2225 st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
2226 }
2227 }
2228 }
2229 if (has_linenumber_table()) {
2230 u_char* table = compressed_linenumber_table();
2260 st->print_cr(" - signature handler: " PTR_FORMAT, p2i(signature_handler()));
2261 }
2262 }
2263
2264 void Method::print_linkage_flags(outputStream* st) {
2265 access_flags().print_on(st);
2266 if (is_default_method()) {
2267 st->print("default ");
2268 }
2269 if (is_overpass()) {
2270 st->print("overpass ");
2271 }
2272 }
2273 #endif //PRODUCT
2274
2275 void Method::print_value_on(outputStream* st) const {
2276 assert(is_method(), "must be method");
2277 st->print("%s", internal_name());
2278 print_address_on(st);
2279 st->print(" ");
2280 name()->print_value_on(st);
2281 st->print(" ");
2282 signature()->print_value_on(st);
2283 st->print(" in ");
2284 method_holder()->print_value_on(st);
2285 if (WizardMode) st->print("#%d", _vtable_index);
2286 if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
2287 if (WizardMode && code() != nullptr) st->print(" ((nmethod*)%p)", code());
2288 }
2289
2290 // Verification
2291
2292 void Method::verify_on(outputStream* st) {
2293 guarantee(is_method(), "object must be method");
2294 guarantee(constants()->is_constantPool(), "should be constant pool");
2295 MethodData* md = method_data();
2296 guarantee(md == nullptr ||
2297 md->is_methodData(), "should be method data");
2298 }
44 #include "logging/log.hpp"
45 #include "logging/logStream.hpp"
46 #include "logging/logTag.hpp"
47 #include "memory/allocation.inline.hpp"
48 #include "memory/metadataFactory.hpp"
49 #include "memory/metaspaceClosure.hpp"
50 #include "memory/oopFactory.hpp"
51 #include "memory/resourceArea.hpp"
52 #include "memory/universe.hpp"
53 #include "nmt/memTracker.hpp"
54 #include "oops/constMethod.hpp"
55 #include "oops/constantPool.hpp"
56 #include "oops/jmethodIDTable.hpp"
57 #include "oops/klass.inline.hpp"
58 #include "oops/method.inline.hpp"
59 #include "oops/methodData.hpp"
60 #include "oops/objArrayKlass.hpp"
61 #include "oops/objArrayOop.inline.hpp"
62 #include "oops/oop.inline.hpp"
63 #include "oops/symbol.hpp"
64 #include "oops/inlineKlass.inline.hpp"
65 #include "oops/trainingData.hpp"
66 #include "prims/jvmtiExport.hpp"
67 #include "prims/methodHandles.hpp"
68 #include "runtime/atomic.hpp"
69 #include "runtime/arguments.hpp"
70 #include "runtime/continuationEntry.hpp"
71 #include "runtime/frame.inline.hpp"
72 #include "runtime/handles.inline.hpp"
73 #include "runtime/init.hpp"
74 #include "runtime/java.hpp"
75 #include "runtime/orderAccess.hpp"
76 #include "runtime/perfData.hpp"
77 #include "runtime/relocator.hpp"
78 #include "runtime/safepointVerifiers.hpp"
79 #include "runtime/sharedRuntime.hpp"
80 #include "runtime/signature.hpp"
81 #include "runtime/threads.hpp"
82 #include "runtime/vm_version.hpp"
83 #include "utilities/align.hpp"
84 #include "utilities/quickSort.hpp"
106 }
107
108 Method::Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name) {
109 NoSafepointVerifier no_safepoint;
110 set_constMethod(xconst);
111 set_access_flags(access_flags);
112 set_intrinsic_id(vmIntrinsics::_none);
113 clear_method_data();
114 clear_method_counters();
115 set_vtable_index(Method::garbage_vtable_index);
116
117 // Fix and bury in Method*
118 set_interpreter_entry(nullptr); // sets i2i entry and from_int
119 set_adapter_entry(nullptr);
120 Method::clear_code(); // from_c/from_i get set to c2i/i2i
121
122 if (access_flags.is_native()) {
123 clear_native_function();
124 set_signature_handler(nullptr);
125 }
126 NOT_PRODUCT(set_compiled_invocation_count(0);)
127 // Name is very useful for debugging.
128 NOT_PRODUCT(_name = name;)
129 }
130
131 // Release Method*. The nmethod will be gone when we get here because
132 // we've walked the code cache.
133 void Method::deallocate_contents(ClassLoaderData* loader_data) {
134 MetadataFactory::free_metadata(loader_data, constMethod());
135 set_constMethod(nullptr);
136 MetadataFactory::free_metadata(loader_data, method_data());
137 clear_method_data();
138 MetadataFactory::free_metadata(loader_data, method_counters());
139 clear_method_counters();
140 set_adapter_entry(nullptr);
141 // The nmethod will be gone when we get here.
142 if (code() != nullptr) _code = nullptr;
143 }
144
145 void Method::release_C_heap_structures() {
146 if (method_data()) {
147 method_data()->release_C_heap_structures();
148
149 // Destroy MethodData embedded lock
150 method_data()->~MethodData();
151 }
152 }
153
154 address Method::get_i2c_entry() {
155 assert(adapter() != nullptr, "must have");
156 return adapter()->get_i2c_entry();
157 }
158
159 address Method::get_c2i_entry() {
160 assert(adapter() != nullptr, "must have");
161 return adapter()->get_c2i_entry();
162 }
163
164 address Method::get_c2i_inline_entry() {
165 assert(adapter() != nullptr, "must have");
166 return adapter()->get_c2i_inline_entry();
167 }
168
169 address Method::get_c2i_unverified_entry() {
170 assert(adapter() != nullptr, "must have");
171 return adapter()->get_c2i_unverified_entry();
172 }
173
174 address Method::get_c2i_unverified_inline_entry() {
175 assert(adapter() != nullptr, "must have");
176 return adapter()->get_c2i_unverified_inline_entry();
177 }
178
179 address Method::get_c2i_no_clinit_check_entry() {
180 assert(VM_Version::supports_fast_class_init_checks(), "");
181 assert(adapter() != nullptr, "must have");
182 return adapter()->get_c2i_no_clinit_check_entry();
183 }
184
185 char* Method::name_and_sig_as_C_string() const {
186 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
187 }
188
189 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
190 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
191 }
192
193 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
194 const char* klass_name = klass->external_name();
195 int klass_name_len = (int)strlen(klass_name);
196 int method_name_len = method_name->utf8_length();
197 int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
198 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
384 return code_base();
385 } else {
386 return bcp;
387 }
388 }
389
390 int Method::size(bool is_native) {
391 // If native, then include pointers for native_function and signature_handler
392 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
393 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
394 return align_metadata_size(header_size() + extra_words);
395 }
396
397 Symbol* Method::klass_name() const {
398 return method_holder()->name();
399 }
400
401 void Method::metaspace_pointers_do(MetaspaceClosure* it) {
402 log_trace(aot)("Iter(Method): %p", this);
403
404 if (!method_holder()->is_rewritten() || CDSConfig::is_valhalla_preview()) {
405 it->push(&_constMethod, MetaspaceClosure::_writable);
406 } else {
407 it->push(&_constMethod);
408 }
409 it->push(&_adapter);
410 it->push(&_method_data);
411 it->push(&_method_counters);
412 NOT_PRODUCT(it->push(&_name);)
413 }
414
415 #if INCLUDE_CDS
416 // Attempt to return method to original state. Clear any pointers
417 // (to objects outside the shared spaces). We won't be able to predict
418 // where they should point in a new JVM. Further initialize some
419 // entries now in order to allow them to be write-protected later.
420
421 void Method::remove_unshareable_info() {
422 unlink_method();
423 if (method_data() != nullptr) {
424 method_data()->remove_unshareable_info();
427 method_counters()->remove_unshareable_info();
428 }
429 if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
430 _adapter->remove_unshareable_info();
431 _adapter = nullptr;
432 }
433 JFR_ONLY(REMOVE_METHOD_ID(this);)
434 }
435
436 void Method::restore_unshareable_info(TRAPS) {
437 assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
438 if (method_data() != nullptr) {
439 method_data()->restore_unshareable_info(CHECK);
440 }
441 if (method_counters() != nullptr) {
442 method_counters()->restore_unshareable_info(CHECK);
443 }
444 if (_adapter != nullptr) {
445 assert(_adapter->is_linked(), "must be");
446 _from_compiled_entry = _adapter->get_c2i_entry();
447 _from_compiled_inline_entry = _adapter->get_c2i_inline_entry();
448 _from_compiled_inline_ro_entry = _adapter->get_c2i_inline_ro_entry();
449 }
450 assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
451 }
452 #endif
453
454 void Method::set_vtable_index(int index) {
455 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
456 // At runtime initialize_vtable is rerun as part of link_class_impl()
457 // for a shared class loaded by the non-boot loader to obtain the loader
458 // constraints based on the runtime classloaders' context.
459 return; // don't write into the shared class
460 } else {
461 _vtable_index = index;
462 }
463 }
464
465 void Method::set_itable_index(int index) {
466 if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
467 // At runtime initialize_itable is rerun as part of link_class_impl()
468 // for a shared class loaded by the non-boot loader to obtain the loader
718 bool Method::init_method_counters(MethodCounters* counters) {
719 // Try to install a pointer to MethodCounters, return true on success.
720 return Atomic::replace_if_null(&_method_counters, counters);
721 }
722
723 void Method::set_exception_handler_entered(int handler_bci) {
724 if (ProfileExceptionHandlers) {
725 MethodData* mdo = method_data();
726 if (mdo != nullptr) {
727 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
728 handler_data.set_exception_handler_entered();
729 }
730 }
731 }
732
733 int Method::extra_stack_words() {
734 // not an inline function, to avoid a header dependency on Interpreter
735 return extra_stack_entries() * Interpreter::stackElementSize;
736 }
737
738 // InlineKlass the method is declared to return. This must not
739 // safepoint as it is called with references live on the stack at
740 // locations the GC is unaware of.
741 InlineKlass* Method::returns_inline_type() const {
742 assert(InlineTypeReturnedAsFields, "Inline types should never be returned as fields");
743 if (is_native()) {
744 return nullptr;
745 }
746 NoSafepointVerifier nsv;
747 SignatureStream ss(signature());
748 ss.skip_to_return_type();
749 return ss.as_inline_klass(method_holder());
750 }
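// Illustrative sketch of the intent (assuming the usual SignatureStream
// behavior): for a method declared as "MyValue compute(int)" the stream is
// advanced past the arguments to the return type, which as_inline_klass()
// resolves against the holder; presumably a non-inline return type yields
// null here.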
751
752 bool Method::compute_has_loops_flag() {
753 BytecodeStream bcs(methodHandle(Thread::current(), this));
754 Bytecodes::Code bc;
755
756 while ((bc = bcs.next()) >= 0) {
757 switch (bc) {
758 case Bytecodes::_ifeq:
759 case Bytecodes::_ifnull:
760 case Bytecodes::_iflt:
761 case Bytecodes::_ifle:
762 case Bytecodes::_ifne:
763 case Bytecodes::_ifnonnull:
764 case Bytecodes::_ifgt:
765 case Bytecodes::_ifge:
766 case Bytecodes::_if_icmpeq:
767 case Bytecodes::_if_icmpne:
768 case Bytecodes::_if_icmplt:
769 case Bytecodes::_if_icmpgt:
770 case Bytecodes::_if_icmple:
771 case Bytecodes::_if_icmpge:
880
881 bool Method::is_accessor() const {
882 return is_getter() || is_setter();
883 }
884
885 bool Method::is_getter() const {
886 if (code_size() != 5) return false;
887 if (size_of_parameters() != 1) return false;
888 if (java_code_at(0) != Bytecodes::_aload_0) return false;
889 if (java_code_at(1) != Bytecodes::_getfield) return false;
890 switch (java_code_at(4)) {
891 case Bytecodes::_ireturn:
892 case Bytecodes::_lreturn:
893 case Bytecodes::_freturn:
894 case Bytecodes::_dreturn:
895 case Bytecodes::_areturn:
896 break;
897 default:
898 return false;
899 }
900 if (has_scalarized_return()) {
901     // Don't treat this as a (trivial) getter method because the
902 // inline type should be returned in a scalarized form.
903 return false;
904 }
905 return true;
906 }
907
908 bool Method::is_setter() const {
909 if (code_size() != 6) return false;
910 if (java_code_at(0) != Bytecodes::_aload_0) return false;
911 switch (java_code_at(1)) {
912 case Bytecodes::_iload_1:
913 case Bytecodes::_aload_1:
914 case Bytecodes::_fload_1:
915 if (size_of_parameters() != 2) return false;
916 break;
917 case Bytecodes::_dload_1:
918 case Bytecodes::_lload_1:
919 if (size_of_parameters() != 3) return false;
920 break;
921 default:
922 return false;
923 }
924 if (java_code_at(2) != Bytecodes::_putfield) return false;
925 if (java_code_at(5) != Bytecodes::_return) return false;
926 if (has_scalarized_args()) {
927     // Don't treat this as a (trivial) setter method because the
928 // inline type argument should be passed in a scalarized form.
929 return false;
930 }
931 return true;
932 }
933
934 bool Method::is_constant_getter() const {
935 int last_index = code_size() - 1;
936   // Check that the first bytecode (1-3 bytes long) is a constant push
937 // and the last bytecode is a return.
938 return (2 <= code_size() && code_size() <= 4 &&
939 Bytecodes::is_const(java_code_at(0)) &&
940 Bytecodes::length_for(java_code_at(0)) == last_index &&
941 Bytecodes::is_return(java_code_at(last_index)) &&
942 !has_scalarized_args());
943 }
944
945 bool Method::is_class_initializer() const {
946 // For classfiles version 51 or greater, ensure that the clinit method is
947 // static. Non-static methods with the name "<clinit>" are not static
948 // initializers. (older classfiles exempted for backward compatibility)
949 return (name() == vmSymbols::class_initializer_name() &&
950 (is_static() ||
951 method_holder()->major_version() < 51));
952 }
953
954 // A method named <init> is a classic object constructor.
955 bool Method::is_object_constructor() const {
956 return name() == vmSymbols::object_initializer_name();
957 }
958
959 bool Method::needs_clinit_barrier() const {
960 return is_static() && !method_holder()->is_initialized();
961 }
962
963 bool Method::is_object_wait0() const {
964 return klass_name() == vmSymbols::java_lang_Object()
965 && name() == vmSymbols::wait_name();
966 }
967
968 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
969 int length = method->checked_exceptions_length();
970 if (length == 0) { // common case
971 return objArrayHandle(THREAD, Universe::the_empty_class_array());
972 } else {
973 methodHandle h_this(THREAD, method);
974 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
975 objArrayHandle mirrors (THREAD, m_oop);
976 for (int i = 0; i < length; i++) {
999 // Not necessarily sorted and not necessarily one-to-one.
1000 CompressedLineNumberReadStream stream(compressed_linenumber_table());
1001 while (stream.read_pair()) {
1002 if (stream.bci() == bci) {
1003 // perfect match
1004 return stream.line();
1005 } else {
1006 // update best_bci/line
1007 if (stream.bci() < bci && stream.bci() >= best_bci) {
1008 best_bci = stream.bci();
1009 best_line = stream.line();
1010 }
1011 }
1012 }
1013 }
1014 return best_line;
1015 }
1016
1017
1018 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
1019   if (constants()->tag_at(klass_index).is_unresolved_klass()) {
1020 Thread *thread = Thread::current();
1021 Symbol* klass_name = constants()->klass_name_at(klass_index);
1022 Handle loader(thread, method_holder()->class_loader());
1023 return SystemDictionary::find_instance_klass(thread, klass_name, loader) != nullptr;
1024 } else {
1025 return true;
1026 }
1027 }
1028
1029
1030 bool Method::is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved) const {
1031 int klass_index = constants()->klass_ref_index_at(refinfo_index, bc);
1032 if (must_be_resolved) {
1033 // Make sure klass is resolved in constantpool.
1034 if (constants()->tag_at(klass_index).is_unresolved_klass()) {
1035 return false;
1036 }
1037 }
1038 return is_klass_loaded_by_klass_index(klass_index);
1039 }
1040
1041
1042 void Method::set_native_function(address function, bool post_event_flag) {
1043 assert(function != nullptr, "use clear_native_function to unregister natives");
1044 assert(!is_special_native_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
1045 address* native_function = native_function_addr();
1046
1047 // We can see racers trying to place the same native function into place. Once
1048 // is plenty.
1049 address current = *native_function;
1050 if (current == function) return;
1051 if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
1052 function != nullptr) {
1053 // native_method_throw_unsatisfied_link_error_entry() should only
1054 // be passed when post_event_flag is false.
1055 assert(function !=
1056 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1185 void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) {
1186 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
1187 if (comp_level == CompLevel_all) {
1188 set_is_not_c1_osr_compilable();
1189 set_is_not_c2_osr_compilable();
1190 } else {
1191 if (is_c1_compile(comp_level))
1192 set_is_not_c1_osr_compilable();
1193 if (is_c2_compile(comp_level))
1194 set_is_not_c2_osr_compilable();
1195 }
1196 assert(!CompilationPolicy::can_be_osr_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
1197 }
1198
1199 // Revert to using the interpreter and clear out the nmethod
1200 void Method::clear_code() {
1201 // this may be null if c2i adapters have not been made yet
1202 // Only should happen at allocate time.
1203 if (adapter() == nullptr) {
1204 _from_compiled_entry = nullptr;
1205 _from_compiled_inline_entry = nullptr;
1206 _from_compiled_inline_ro_entry = nullptr;
1207 } else {
1208 _from_compiled_entry = adapter()->get_c2i_entry();
1209 _from_compiled_inline_entry = adapter()->get_c2i_inline_entry();
1210 _from_compiled_inline_ro_entry = adapter()->get_c2i_inline_ro_entry();
1211 }
1212 OrderAccess::storestore();
1213 _from_interpreted_entry = _i2i_entry;
1214 OrderAccess::storestore();
1215 _code = nullptr;
1216 }
1217
1218 void Method::unlink_code(nmethod *compare) {
1219 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
1220 // We need to check if either the _code or _from_compiled_code_entry_point
1221 // refer to this nmethod because there is a race in setting these two fields
1222 // in Method* as seen in bugid 4947125.
1223 if (code() == compare ||
1224 from_compiled_entry() == compare->verified_entry_point()) {
1225 clear_code();
1226 }
1227 }
1228
1229 void Method::unlink_code() {
1230 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
1231 clear_code();
1232 }
1233
1234 #if INCLUDE_CDS
1235 // Called by class data sharing to remove any entry points (which are not shared)
1236 void Method::unlink_method() {
1237 assert(CDSConfig::is_dumping_archive(), "sanity");
1238 _code = nullptr;
1239 if (!CDSConfig::is_dumping_adapters() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) {
1240 _adapter = nullptr;
1241 }
1242 _i2i_entry = nullptr;
1243 _from_compiled_entry = nullptr;
1244 _from_compiled_inline_entry = nullptr;
1245 _from_compiled_inline_ro_entry = nullptr;
1246 _from_interpreted_entry = nullptr;
1247
1248 if (is_native()) {
1249 *native_function_addr() = nullptr;
1250 set_signature_handler(nullptr);
1251 }
1252 NOT_PRODUCT(set_compiled_invocation_count(0);)
1253
1254 clear_method_data();
1255 clear_method_counters();
1256 clear_is_not_c1_compilable();
1257 clear_is_not_c1_osr_compilable();
1258 clear_is_not_c2_compilable();
1259 clear_is_not_c2_osr_compilable();
1260 clear_queued_for_compilation();
1261
1262 remove_unshareable_flags();
1263 }
1264
1265 void Method::remove_unshareable_flags() {
1266 // clear all the flags that shouldn't be in the archived version
1267 assert(!is_old(), "must be");
1268 assert(!is_obsolete(), "must be");
1269 assert(!is_deleted(), "must be");
1270
1271 set_is_prefixed_native(false);
1272 set_queued_for_compilation(false);
1273 set_is_not_c2_compilable(false);
1274 set_is_not_c1_compilable(false);
1275 set_is_not_c2_osr_compilable(false);
1276 set_on_stack_flag(false);
1277 set_has_scalarized_args(false);
1278 set_has_scalarized_return(false);
1279 }
1280 #endif
1281
1282 // Called when the method_holder is getting linked. Set up entrypoints so the method
1283 // is ready to be called from interpreter, compiler, and vtables.
1284 void Method::link_method(const methodHandle& h_method, TRAPS) {
1285 if (log_is_enabled(Info, perf, class, link)) {
1286 ClassLoader::perf_ik_link_methods_count()->inc();
1287 }
1288
1289 // If the code cache is full, we may reenter this function for the
1290 // leftover methods that weren't linked.
1291 if (adapter() != nullptr) {
1292 if (adapter()->is_shared()) {
1293 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1294 } else {
1295 return;
1296 }
1297 }
1298 assert( _code == nullptr, "nothing compiled yet" );
1299
1300 // Setup interpreter entrypoint
1301 assert(this == h_method(), "wrong h_method()" );
1302
1303 assert(adapter() == nullptr || adapter()->is_linked(), "init'd to null or restored from cache");
1304 address entry = Interpreter::entry_for_method(h_method);
1305 assert(entry != nullptr, "interpreter entry must be non-null");
1306 // Sets both _i2i_entry and _from_interpreted_entry
1307 set_interpreter_entry(entry);
1308
1309 // Don't overwrite already registered native entries.
1310 if (is_native() && !has_native_function()) {
1311 set_native_function(
1312 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1313 !native_bind_event_is_interesting);
1314 }
1315 if (InlineTypeReturnedAsFields && returns_inline_type() && !has_scalarized_return()) {
1316 set_has_scalarized_return();
1317 }
1318
1319 // Setup compiler entrypoint. This is made eagerly, so we do not need
1320 // special handling of vtables. An alternative is to make adapters more
1321 // lazily by calling make_adapter() from from_compiled_entry() for the
1322 // normal calls. For vtable calls life gets more complicated. When a
1323 // call-site goes mega-morphic we need adapters in all methods which can be
1324 // called from the vtable. We need adapters on such methods that get loaded
1325 // later. Ditto for mega-morphic itable calls. If this proves to be a
1326 // problem we'll make these lazily later.
1327 if (_adapter == nullptr) {
1328 (void) make_adapters(h_method, CHECK);
1329 assert(adapter()->is_linked(), "Adapter must have been linked");
1330 }
1331
1332 // ONLY USE the h_method now as make_adapter may have blocked
1333
1334 if (h_method->is_continuation_native_intrinsic()) {
1335 _from_interpreted_entry = nullptr;
1336 _from_compiled_entry = nullptr;
1337 _i2i_entry = nullptr;
1349 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1350 PerfTraceTime timer(ClassLoader::perf_method_adapters_time());
1351
1352 // Adapters for compiled code are made eagerly here. They are fairly
1353 // small (generally < 100 bytes) and quick to make (and cached and shared)
1354 // so making them eagerly shouldn't be too expensive.
1355 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1356 if (adapter == nullptr ) {
1357 if (!is_init_completed()) {
1358 // Don't throw exceptions during VM initialization because java.lang.* classes
1359 // might not have been initialized, causing problems when constructing the
1360 // Java exception object.
1361 vm_exit_during_initialization("Out of space in CodeCache for adapters");
1362 } else {
1363 THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
1364 }
1365 }
1366
1367 mh->set_adapter_entry(adapter);
1368 mh->_from_compiled_entry = adapter->get_c2i_entry();
1369 mh->_from_compiled_inline_entry = adapter->get_c2i_inline_entry();
1370 mh->_from_compiled_inline_ro_entry = adapter->get_c2i_inline_ro_entry();
1371 return adapter->get_c2i_entry();
1372 }
1373
1374 // The verified_code_entry() must be called when an invoke is resolved
1375 // on this method.
1376
1377 // It returns the compiled code entry point, after asserting not null.
1378 // This function is called after potential safepoints so that the nmethod
1379 // or adapter that it points to is still live and valid.
1380 // This function must not hit a safepoint!
1381 address Method::verified_code_entry() {
1382 DEBUG_ONLY(NoSafepointVerifier nsv;)
1383 assert(_from_compiled_entry != nullptr, "must be set");
1384 return _from_compiled_entry;
1385 }
1386
1387 address Method::verified_inline_code_entry() {
1388 DEBUG_ONLY(NoSafepointVerifier nsv;)
1389 assert(_from_compiled_inline_entry != nullptr, "must be set");
1390 return _from_compiled_inline_entry;
1391 }
1392
1393 address Method::verified_inline_ro_code_entry() {
1394 DEBUG_ONLY(NoSafepointVerifier nsv;)
1395 assert(_from_compiled_inline_ro_entry != nullptr, "must be set");
1396 return _from_compiled_inline_ro_entry;
1397 }
1398
1399 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
1400 // (could be racing a deopt).
1401 // Not inline to avoid circular ref.
1402 bool Method::check_code() const {
1403 // cached in a register or local. There's a race on the value of the field.
1404 nmethod *code = Atomic::load_acquire(&_code);
1405 return code == nullptr || (code->method() == nullptr) || (code->method() == (Method*)this && !code->is_osr_method());
1406 }
1407
1408 // Install compiled code. Instantly it can execute.
1409 void Method::set_code(const methodHandle& mh, nmethod *code) {
1410 assert_lock_strong(NMethodState_lock);
1411 assert( code, "use clear_code to remove code" );
1412 assert( mh->check_code(), "" );
1413
1414 guarantee(mh->adapter() != nullptr, "Adapter blob must already exist!");
1415
1416 // These writes must happen in this order, because the interpreter will
1417 // directly jump to from_interpreted_entry which jumps to an i2c adapter
1418 // which jumps to _from_compiled_entry.
1419 mh->_code = code; // Assign before allowing compiled code to exec
1420
1421 int comp_level = code->comp_level();
1422 // In theory there could be a race here. In practice it is unlikely
1423 // and not worth worrying about.
1424 if (comp_level > mh->highest_comp_level()) {
1425 mh->set_highest_comp_level(comp_level);
1426 }
1427
1428 OrderAccess::storestore();
1429 mh->_from_compiled_entry = code->verified_entry_point();
1430 mh->_from_compiled_inline_entry = code->verified_inline_entry_point();
1431 mh->_from_compiled_inline_ro_entry = code->verified_inline_ro_entry_point();
1432 OrderAccess::storestore();
1433
1434 if (mh->is_continuation_native_intrinsic()) {
1435 assert(mh->_from_interpreted_entry == nullptr, "initialized incorrectly"); // see link_method
1436
1437 if (mh->is_continuation_enter_intrinsic()) {
1438 // This is the entry used when we're in interpreter-only mode; see InterpreterMacroAssembler::jump_from_interpreted
1439 mh->_i2i_entry = ContinuationEntry::interpreted_entry();
1440 } else if (mh->is_continuation_yield_intrinsic()) {
1441 mh->_i2i_entry = mh->get_i2c_entry();
1442 } else {
1443 guarantee(false, "Unknown Continuation native intrinsic");
1444 }
1445 // This must come last, as it is what's tested in LinkResolver::resolve_static_call
1446 Atomic::release_store(&mh->_from_interpreted_entry , mh->get_i2c_entry());
1447 } else if (!mh->is_method_handle_intrinsic()) {
1448 // Instantly compiled code can execute.
1449 mh->_from_interpreted_entry = mh->get_i2c_entry();
1450 }
1451 }
1604 assert(m->can_be_statically_bound(), "");
1605 m->set_vtable_index(Method::nonvirtual_vtable_index);
1606 m->link_method(m, CHECK_(empty));
1607
1608 if (iid == vmIntrinsics::_linkToNative) {
1609 m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1610 }
1611 if (log_is_enabled(Debug, methodhandles)) {
1612 LogTarget(Debug, methodhandles) lt;
1613 LogStream ls(lt);
1614 m->print_on(&ls);
1615 }
1616
1617 return m;
1618 }
1619
1620 #if INCLUDE_CDS
1621 void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
1622 if (m->adapter() != nullptr) {
1623 m->set_from_compiled_entry(m->adapter()->get_c2i_entry());
1624 m->set_from_compiled_inline_entry(m->adapter()->get_c2i_inline_entry());
1625 m->set_from_compiled_inline_ro_entry(m->adapter()->get_c2i_inline_ro_entry());
1626 }
1627 m->link_method(m, CHECK);
1628
1629 if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
1630 m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1631 }
1632 }
1633 #endif
1634
1635 Klass* Method::check_non_bcp_klass(Klass* klass) {
1636 if (klass != nullptr && klass->class_loader() != nullptr) {
1637 if (klass->is_objArray_klass())
1638 klass = ObjArrayKlass::cast(klass)->bottom_klass();
1639 return klass;
1640 }
1641 return nullptr;
1642 }
1643
1644
1645 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
2223 }
2224
2225 // Check that this pointer is valid by checking that the vtbl pointer matches
2226 bool Method::is_valid_method(const Method* m) {
2227 if (m == nullptr) {
2228 return false;
2229 } else if ((intptr_t(m) & (wordSize-1)) != 0) {
2230 // Quick sanity check on pointer.
2231 return false;
2232 } else if (!os::is_readable_range(m, m + 1)) {
2233 return false;
2234 } else if (m->is_shared()) {
2235 return CppVtables::is_valid_shared_method(m);
2236 } else if (Metaspace::contains_non_shared(m)) {
2237 return has_method_vptr((const void*)m);
2238 } else {
2239 return false;
2240 }
2241 }
2242
2243 bool Method::is_scalarized_arg(int idx) const {
2244 if (!has_scalarized_args()) {
2245 return false;
2246 }
2247 // Search through signature and check if argument is wrapped in T_METADATA/T_VOID
2248 int depth = 0;
2249 const GrowableArray<SigEntry>* sig = adapter()->get_sig_cc();
2250 for (int i = 0; i < sig->length(); i++) {
2251 BasicType bt = sig->at(i)._bt;
2252 if (bt == T_METADATA) {
2253 depth++;
2254 }
2255 if (idx == 0) {
2256 break; // Argument found
2257 }
2258 if (bt == T_VOID && (sig->at(i-1)._bt != T_LONG && sig->at(i-1)._bt != T_DOUBLE)) {
2259 depth--;
2260 }
2261 if (depth == 0 && bt != T_LONG && bt != T_DOUBLE) {
2262 idx--; // Advance to next argument
2263 }
2264 }
2265 return depth != 0;
2266 }
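// Illustrative example (hypothetical layout, following the bracketing scheme
// above): for a compiled-code signature laid out as
//   [ T_METADATA, T_INT, T_INT, T_VOID, T_INT ]
// argument 0 is an inline type flattened into two ints (the walk stops on it
// with depth != 0), while argument 1 is the trailing plain T_INT and is not
// scalarized.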
2267
2268 // Printing
2269
2270 #ifndef PRODUCT
2271
2272 void Method::print_on(outputStream* st) const {
2273 ResourceMark rm;
2274 assert(is_method(), "must be method");
2275 st->print_cr("%s", internal_name());
2276 st->print_cr(" - this oop: " PTR_FORMAT, p2i(this));
2277 st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr();
2278 st->print (" - constants: " PTR_FORMAT " ", p2i(constants()));
2279 constants()->print_value_on(st); st->cr();
2280 st->print (" - access: 0x%x ", access_flags().as_method_flags()); access_flags().print_on(st); st->cr();
2281 st->print (" - flags: 0x%x ", _flags.as_int()); _flags.print_on(st); st->cr();
2282 st->print (" - name: "); name()->print_value_on(st); st->cr();
2283 st->print (" - signature: "); signature()->print_value_on(st); st->cr();
2284 st->print_cr(" - max stack: %d", max_stack());
2285 st->print_cr(" - max locals: %d", max_locals());
2286 st->print_cr(" - size of params: %d", size_of_parameters());
2287 st->print_cr(" - method size: %d", method_size());
2288 if (intrinsic_id() != vmIntrinsics::_none)
2289 st->print_cr(" - intrinsic id: %d %s", vmIntrinsics::as_int(intrinsic_id()), vmIntrinsics::name_at(intrinsic_id()));
2290 if (highest_comp_level() != CompLevel_none)
2291 st->print_cr(" - highest level: %d", highest_comp_level());
2292 st->print_cr(" - vtable index: %d", _vtable_index);
2293 #ifdef ASSERT
2294 if (valid_itable_index())
2295 st->print_cr(" - itable index: %d", itable_index());
2296 #endif
2297 st->print_cr(" - i2i entry: " PTR_FORMAT, p2i(interpreter_entry()));
2298 st->print( " - adapters: ");
2299 AdapterHandlerEntry* a = ((Method*)this)->adapter();
2300 if (a == nullptr)
2301 st->print_cr(PTR_FORMAT, p2i(a));
2302 else
2303 a->print_adapter_on(st);
2304 st->print_cr(" - compiled entry " PTR_FORMAT, p2i(from_compiled_entry()));
2305 st->print_cr(" - compiled inline entry " PTR_FORMAT, p2i(from_compiled_inline_entry()));
2306 st->print_cr(" - compiled inline ro entry " PTR_FORMAT, p2i(from_compiled_inline_ro_entry()));
2307 st->print_cr(" - code size: %d", code_size());
2308 if (code_size() != 0) {
2309 st->print_cr(" - code start: " PTR_FORMAT, p2i(code_base()));
2310 st->print_cr(" - code end (excl): " PTR_FORMAT, p2i(code_base() + code_size()));
2311 }
2312 if (method_data() != nullptr) {
2313 st->print_cr(" - method data: " PTR_FORMAT, p2i(method_data()));
2314 }
2315 st->print_cr(" - checked ex length: %d", checked_exceptions_length());
2316 if (checked_exceptions_length() > 0) {
2317 CheckedExceptionElement* table = checked_exceptions_start();
2318 st->print_cr(" - checked ex start: " PTR_FORMAT, p2i(table));
2319 if (Verbose) {
2320 for (int i = 0; i < checked_exceptions_length(); i++) {
2321 st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
2322 }
2323 }
2324 }
2325 if (has_linenumber_table()) {
2326 u_char* table = compressed_linenumber_table();
2356 st->print_cr(" - signature handler: " PTR_FORMAT, p2i(signature_handler()));
2357 }
2358 }
2359
2360 void Method::print_linkage_flags(outputStream* st) {
2361 access_flags().print_on(st);
2362 if (is_default_method()) {
2363 st->print("default ");
2364 }
2365 if (is_overpass()) {
2366 st->print("overpass ");
2367 }
2368 }
2369 #endif //PRODUCT
2370
2371 void Method::print_value_on(outputStream* st) const {
2372 assert(is_method(), "must be method");
2373 st->print("%s", internal_name());
2374 print_address_on(st);
2375 st->print(" ");
2376 if (WizardMode) access_flags().print_on(st);
2377 name()->print_value_on(st);
2378 st->print(" ");
2379 signature()->print_value_on(st);
2380 st->print(" in ");
2381 method_holder()->print_value_on(st);
2382 if (WizardMode) st->print("#%d", _vtable_index);
2383 if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
2384 if (WizardMode && code() != nullptr) st->print(" ((nmethod*)%p)", code());
2385 }
2386
2387 // Verification
2388
2389 void Method::verify_on(outputStream* st) {
2390 guarantee(is_method(), "object must be method");
2391 guarantee(constants()->is_constantPool(), "should be constant pool");
2392 MethodData* md = method_data();
2393 guarantee(md == nullptr ||
2394 md->is_methodData(), "should be method data");
2395 }