36 #include "code/debugInfoRec.hpp"
37 #include "compiler/compilationPolicy.hpp"
38 #include "gc/shared/collectedHeap.inline.hpp"
39 #include "interpreter/bytecodes.hpp"
40 #include "interpreter/bytecodeStream.hpp"
41 #include "interpreter/bytecodeTracer.hpp"
42 #include "interpreter/interpreter.hpp"
43 #include "interpreter/oopMapCache.hpp"
44 #include "logging/log.hpp"
45 #include "logging/logStream.hpp"
46 #include "logging/logTag.hpp"
47 #include "memory/allocation.inline.hpp"
48 #include "memory/metadataFactory.hpp"
49 #include "memory/metaspaceClosure.hpp"
50 #include "memory/oopFactory.hpp"
51 #include "memory/resourceArea.hpp"
52 #include "memory/universe.hpp"
53 #include "nmt/memTracker.hpp"
54 #include "oops/constantPool.hpp"
55 #include "oops/constMethod.hpp"
56 #include "oops/jmethodIDTable.hpp"
57 #include "oops/klass.inline.hpp"
58 #include "oops/method.inline.hpp"
59 #include "oops/methodData.hpp"
60 #include "oops/objArrayKlass.hpp"
61 #include "oops/objArrayOop.inline.hpp"
62 #include "oops/oop.inline.hpp"
63 #include "oops/symbol.hpp"
64 #include "oops/trainingData.hpp"
65 #include "prims/jvmtiExport.hpp"
66 #include "prims/methodHandles.hpp"
67 #include "runtime/arguments.hpp"
68 #include "runtime/atomicAccess.hpp"
69 #include "runtime/continuationEntry.hpp"
70 #include "runtime/frame.inline.hpp"
71 #include "runtime/handles.inline.hpp"
72 #include "runtime/init.hpp"
73 #include "runtime/java.hpp"
74 #include "runtime/orderAccess.hpp"
75 #include "runtime/perfData.hpp"
150 method_data()->~MethodData();
151 }
152 }
153
154 address Method::get_i2c_entry() {
155 if (is_abstract()) {
156 return SharedRuntime::throw_AbstractMethodError_entry();
157 }
158 assert(adapter() != nullptr, "must have");
159 return adapter()->get_i2c_entry();
160 }
161
162 address Method::get_c2i_entry() {
163 if (is_abstract()) {
164 return SharedRuntime::get_handle_wrong_method_abstract_stub();
165 }
166 assert(adapter() != nullptr, "must have");
167 return adapter()->get_c2i_entry();
168 }
169
170 address Method::get_c2i_unverified_entry() {
171 if (is_abstract()) {
172 return SharedRuntime::get_handle_wrong_method_abstract_stub();
173 }
174 assert(adapter() != nullptr, "must have");
175 return adapter()->get_c2i_unverified_entry();
176 }
177
178 address Method::get_c2i_no_clinit_check_entry() {
179 if (is_abstract()) {
180 return nullptr;
181 }
182 assert(VM_Version::supports_fast_class_init_checks(), "");
183 assert(adapter() != nullptr, "must have");
184 return adapter()->get_c2i_no_clinit_check_entry();
185 }
186
// Convenience overload: formats this method via the static variant using its
// constant pool holder, name, and signature.
char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}
190
// Same as the no-argument overload, but formats into the caller-supplied
// buffer of the given size.
char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}
194
195 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
196 const char* klass_name = klass->external_name();
197 int klass_name_len = (int)strlen(klass_name);
386 return code_base();
387 } else {
388 return bcp;
389 }
390 }
391
392 int Method::size(bool is_native) {
393 // If native, then include pointers for native_function and signature_handler
394 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
395 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
396 return align_metadata_size(header_size() + extra_words);
397 }
398
// Name of the class that holds this method.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
402
// Visit every metaspace pointer embedded in this Method so the AOT/CDS
// archiving machinery can relocate them.
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(aot)("Iter(Method): %p", this);

  if (!method_holder()->is_rewritten()) {
    // Holder not yet rewritten: the constMethod's bytecodes may still change,
    // so it must land in a writable archive region.
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
416
417 #if INCLUDE_CDS
418 // Attempt to return method to original state. Clear any pointers
419 // (to objects outside the shared spaces). We won't be able to predict
420 // where they should point in a new JVM. Further initialize some
421 // entries now in order allow them to be write protected later.
422
423 void Method::remove_unshareable_info() {
424 unlink_method();
425 if (method_data() != nullptr) {
426 method_data()->remove_unshareable_info();
429 method_counters()->remove_unshareable_info();
430 }
431 if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
432 _adapter->remove_unshareable_info();
433 _adapter = nullptr;
434 }
435 JFR_ONLY(REMOVE_METHOD_ID(this);)
436 }
437
// Re-create runtime state that remove_unshareable_info() stripped before this
// method was written to the AOT/CDS archive.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  if (_adapter != nullptr) {
    // Archived adapters are stored pre-linked; re-derive the compiled-call
    // entry point from the adapter's c2i entry.
    assert(_adapter->is_linked(), "must be");
    _from_compiled_entry = _adapter->get_c2i_entry();
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
452 #endif
453
454 void Method::set_vtable_index(int index) {
455 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
456 // At runtime initialize_vtable is rerun as part of link_class_impl()
457 // for a shared class loaded by the non-boot loader to obtain the loader
458 // constraints based on the runtime classloaders' context.
459 return; // don't write into the shared class
460 } else {
461 _vtable_index = index;
462 }
463 }
464
465 void Method::set_itable_index(int index) {
466 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
467 // At runtime initialize_itable is rerun as part of link_class_impl()
468 // for a shared class loaded by the non-boot loader to obtain the loader
718 bool Method::init_method_counters(MethodCounters* counters) {
719 // Try to install a pointer to MethodCounters, return true on success.
720 return AtomicAccess::replace_if_null(&_method_counters, counters);
721 }
722
723 void Method::set_exception_handler_entered(int handler_bci) {
724 if (ProfileExceptionHandlers) {
725 MethodData* mdo = method_data();
726 if (mdo != nullptr) {
727 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
728 handler_data.set_exception_handler_entered();
729 }
730 }
731 }
732
733 int Method::extra_stack_words() {
734 // not an inline function, to avoid a header dependency on Interpreter
735 return extra_stack_entries() * Interpreter::stackElementSize;
736 }
737
738 bool Method::compute_has_loops_flag() {
739 BytecodeStream bcs(methodHandle(Thread::current(), this));
740 Bytecodes::Code bc;
741
742 while ((bc = bcs.next()) >= 0) {
743 switch (bc) {
744 case Bytecodes::_ifeq:
745 case Bytecodes::_ifnull:
746 case Bytecodes::_iflt:
747 case Bytecodes::_ifle:
748 case Bytecodes::_ifne:
749 case Bytecodes::_ifnonnull:
750 case Bytecodes::_ifgt:
751 case Bytecodes::_ifge:
752 case Bytecodes::_if_icmpeq:
753 case Bytecodes::_if_icmpne:
754 case Bytecodes::_if_icmplt:
755 case Bytecodes::_if_icmpgt:
756 case Bytecodes::_if_icmple:
757 case Bytecodes::_if_icmpge:
866
867 bool Method::is_accessor() const {
868 return is_getter() || is_setter();
869 }
870
871 bool Method::is_getter() const {
872 if (code_size() != 5) return false;
873 if (size_of_parameters() != 1) return false;
874 if (java_code_at(0) != Bytecodes::_aload_0) return false;
875 if (java_code_at(1) != Bytecodes::_getfield) return false;
876 switch (java_code_at(4)) {
877 case Bytecodes::_ireturn:
878 case Bytecodes::_lreturn:
879 case Bytecodes::_freturn:
880 case Bytecodes::_dreturn:
881 case Bytecodes::_areturn:
882 break;
883 default:
884 return false;
885 }
886 return true;
887 }
888
889 bool Method::is_setter() const {
890 if (code_size() != 6) return false;
891 if (java_code_at(0) != Bytecodes::_aload_0) return false;
892 switch (java_code_at(1)) {
893 case Bytecodes::_iload_1:
894 case Bytecodes::_aload_1:
895 case Bytecodes::_fload_1:
896 if (size_of_parameters() != 2) return false;
897 break;
898 case Bytecodes::_dload_1:
899 case Bytecodes::_lload_1:
900 if (size_of_parameters() != 3) return false;
901 break;
902 default:
903 return false;
904 }
905 if (java_code_at(2) != Bytecodes::_putfield) return false;
906 if (java_code_at(5) != Bytecodes::_return) return false;
907 return true;
908 }
909
910 bool Method::is_constant_getter() const {
911 int last_index = code_size() - 1;
912 // Check if the first 1-3 bytecodes are a constant push
913 // and the last bytecode is a return.
914 return (2 <= code_size() && code_size() <= 4 &&
915 Bytecodes::is_const(java_code_at(0)) &&
916 Bytecodes::length_for(java_code_at(0)) == last_index &&
917 Bytecodes::is_return(java_code_at(last_index)));
918 }
919
920 bool Method::has_valid_initializer_flags() const {
921 return (is_static() ||
922 method_holder()->major_version() < 51);
923 }
924
925 bool Method::is_static_initializer() const {
926 // For classfiles version 51 or greater, ensure that the clinit method is
927 // static. Non-static methods with the name "<clinit>" are not static
928 // initializers. (older classfiles exempted for backward compatibility)
929 return name() == vmSymbols::class_initializer_name() &&
930 has_valid_initializer_flags();
931 }
932
// True for constructors, i.e. methods named "<init>".
bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}
936
937 bool Method::needs_clinit_barrier() const {
938 return is_static() && !method_holder()->is_initialized();
939 }
940
941 bool Method::is_object_wait0() const {
942 return klass_name() == vmSymbols::java_lang_Object()
943 && name() == vmSymbols::wait_name();
944 }
945
946 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
947 int length = method->checked_exceptions_length();
948 if (length == 0) { // common case
949 return objArrayHandle(THREAD, Universe::the_empty_class_array());
950 } else {
951 methodHandle h_this(THREAD, method);
952 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
953 objArrayHandle mirrors (THREAD, m_oop);
954 for (int i = 0; i < length; i++) {
977 // Not necessarily sorted and not necessarily one-to-one.
978 CompressedLineNumberReadStream stream(compressed_linenumber_table());
979 while (stream.read_pair()) {
980 if (stream.bci() == bci) {
981 // perfect match
982 return stream.line();
983 } else {
984 // update best_bci/line
985 if (stream.bci() < bci && stream.bci() >= best_bci) {
986 best_bci = stream.bci();
987 best_line = stream.line();
988 }
989 }
990 }
991 }
992 return best_line;
993 }
994
995
996 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
997 if( constants()->tag_at(klass_index).is_unresolved_klass() ) {
998 Thread *thread = Thread::current();
999 Symbol* klass_name = constants()->klass_name_at(klass_index);
1000 Handle loader(thread, method_holder()->class_loader());
1001 return SystemDictionary::find_instance_klass(thread, klass_name, loader) != nullptr;
1002 } else {
1003 return true;
1004 }
1005 }
1006
1007
1008 bool Method::is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved) const {
1009 int klass_index = constants()->klass_ref_index_at(refinfo_index, bc);
1010 if (must_be_resolved) {
1011 // Make sure klass is resolved in constantpool.
1012 if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
1013 }
1014 return is_klass_loaded_by_klass_index(klass_index);
1015 }
1016
1017
1018 void Method::set_native_function(address function, bool post_event_flag) {
1019 assert(function != nullptr, "use clear_native_function to unregister natives");
1020 assert(!is_special_native_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
1021 address* native_function = native_function_addr();
1022
1023 // We can see racers trying to place the same native function into place. Once
1024 // is plenty.
1025 address current = *native_function;
1026 if (current == function) return;
1027 if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
1028 function != nullptr) {
1029 // native_method_throw_unsatisfied_link_error_entry() should only
1030 // be passed when post_event_flag is false.
1031 assert(function !=
1032 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1160
1161 void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) {
1162 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
1163 if (comp_level == CompLevel_all) {
1164 set_is_not_c1_osr_compilable();
1165 set_is_not_c2_osr_compilable();
1166 } else {
1167 if (is_c1_compile(comp_level))
1168 set_is_not_c1_osr_compilable();
1169 if (is_c2_compile(comp_level))
1170 set_is_not_c2_osr_compilable();
1171 }
1172 assert(!CompilationPolicy::can_be_osr_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
1173 }
1174
// Revert to using the interpreter and clear out the nmethod
void Method::clear_code() {
  // this may be null if c2i adapters have not been made yet
  // Only should happen at allocate time.
  if (adapter() == nullptr) {
    _from_compiled_entry = nullptr;
  } else {
    _from_compiled_entry = adapter()->get_c2i_entry();
  }
  // Ordering matters: racing callers may still jump through these entries, so
  // publish a valid c2i entry before resetting the interpreted entry and code.
  OrderAccess::storestore();
  _from_interpreted_entry = _i2i_entry;
  OrderAccess::storestore();
  _code = nullptr;
}
1189
// Drop the association with 'compare' (reverting to the interpreter) if this
// method currently points at it.
void Method::unlink_code(nmethod *compare) {
  // Take NMethodState_lock unless the caller already holds it.
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  // We need to check if either the _code or _from_compiled_code_entry_point
  // refer to this nmethod because there is a race in setting these two fields
  // in Method* as seen in bugid 4947125.
  if (code() == compare ||
      from_compiled_entry() == compare->verified_entry_point()) {
    clear_code();
  }
}
1200
// Unconditionally revert this method to the interpreter, dropping any nmethod.
void Method::unlink_code() {
  // Take NMethodState_lock unless the caller already holds it.
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  clear_code();
}
1205
1206 #if INCLUDE_CDS
// Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  _code = nullptr;
  if (!CDSConfig::is_dumping_adapters()) {
    _adapter = nullptr;
  }
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_interpreted_entry = nullptr;

  if (is_native()) {
    // Native entry points are process-specific and cannot be archived.
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  // Drop profiling data, counters, and compilability verdicts; these are
  // runtime-specific and will be rebuilt after the archive is loaded.
  clear_method_data();
  clear_method_counters();
  clear_is_not_c1_compilable();
  clear_is_not_c1_osr_compilable();
  clear_is_not_c2_compilable();
  clear_is_not_c2_osr_compilable();
  clear_queued_for_compilation();

  remove_unshareable_flags();
}
1234
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  // NOTE(review): is_not_c1_osr_compilable is not cleared here — presumably it
  // aliases the c1 flag cleared above; confirm against method.hpp.
  set_on_stack_flag(false);
}
1248 #endif
1249
// Called when the method_holder is getting linked. Setup entrypoints so the method
// is ready to be called from interpreter, compiler, and vtables.
void Method::link_method(const methodHandle& h_method, TRAPS) {
  if (log_is_enabled(Info, perf, class, link)) {
    ClassLoader::perf_ik_link_methods_count()->inc();
  }

  // If the code cache is full, we may reenter this function for the
  // leftover methods that weren't linked.
  if (adapter() != nullptr) {
    if (adapter()->in_aot_cache()) {
      // An AOT-cached adapter still needs the entry-point setup below.
      assert(adapter()->is_linked(), "Adapter is shared but not linked");
    } else {
      // Already fully linked on a previous attempt; nothing more to do.
      return;
    }
  }
  assert( _code == nullptr, "nothing compiled yet" );

  // Setup interpreter entrypoint
  assert(this == h_method(), "wrong h_method()" );

  assert(adapter() == nullptr || adapter()->is_linked(), "init'd to null or restored from cache");
  address entry = Interpreter::entry_for_method(h_method);
  assert(entry != nullptr, "interpreter entry must be non-null");
  // Sets both _i2i_entry and _from_interpreted_entry
  set_interpreter_entry(entry);

  // Don't overwrite already registered native entries.
  if (is_native() && !has_native_function()) {
    set_native_function(
      SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
      !native_bind_event_is_interesting);
  }

  // Setup compiler entrypoint. This is made eagerly, so we do not need
  // special handling of vtables. An alternative is to make adapters more
  // lazily by calling make_adapter() from from_compiled_entry() for the
  // normal calls. For vtable calls life gets more complicated. When a
  // call-site goes mega-morphic we need adapters in all methods which can be
  // called from the vtable. We need adapters on such methods that get loaded
  // later. Ditto for mega-morphic itable calls. If this proves to be a
  // problem we'll make these lazily later.
  if (is_abstract()) {
    h_method->_from_compiled_entry = SharedRuntime::get_handle_wrong_method_abstract_stub();
  } else if (_adapter == nullptr) {
    (void) make_adapters(h_method, CHECK);
#ifndef ZERO
    assert(adapter()->is_linked(), "Adapter must have been linked");
#endif
    h_method->_from_compiled_entry = adapter()->get_c2i_entry();
  }

  // ONLY USE the h_method now as make_adapter may have blocked

  if (h_method->is_continuation_native_intrinsic()) {
    // Continuation intrinsics bypass the normal entries; they are wired up to
    // a dedicated native wrapper below (and in set_code).
    _from_interpreted_entry = nullptr;
    _from_compiled_entry = nullptr;
    _i2i_entry = nullptr;
    if (Continuations::enabled()) {
      assert(!Threads::is_vm_complete(), "should only be called during vm init");
      AdapterHandlerLibrary::create_native_wrapper(h_method);
      if (!h_method->has_compiled_code()) {
        THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
      }
      assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
    }
  }
}
1318
// Create (or fetch from the shared cache) the i2c/c2i adapters for mh and
// install them on the method. Returns the c2i entry on success; on failure it
// either exits the VM (during init) or throws OutOfMemoryError and returns null.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  assert(!mh->is_abstract(), "abstract methods do not have adapters");
  PerfTraceTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  return adapter->get_c2i_entry();
}
1341
1342 // The verified_code_entry() must be called when a invoke is resolved
1343 // on this method.
1344
1345 // It returns the compiled code entry point, after asserting not null.
1346 // This function is called after potential safepoints so that nmethod
1347 // or adapter that it points to is still live and valid.
1348 // This function must not hit a safepoint!
address Method::verified_code_entry() {
  // Must not safepoint: the returned entry is only guaranteed valid while the
  // nmethod or adapter it points to stays alive (see comment above).
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_entry != nullptr, "must be set");
  return _from_compiled_entry;
}
1354
1355 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
1356 // (could be racing a deopt).
1357 // Not inline to avoid circular ref.
bool Method::check_code() const {
  // cached in a register or local. There's a race on the value of the field.
  nmethod *code = AtomicAccess::load_acquire(&_code);
  // Valid states: no code installed, code not yet back-linked to a method, or
  // code back-linked to this method and not an OSR compilation.
  return code == nullptr || (code->method() == nullptr) || (code->method() == (Method*)this && !code->is_osr_method());
}
1363
// Install compiled code. Instantly it can execute.
// Caller holds NMethodState_lock; the stores below publish _code first, then
// the compiled entry, then (last) the interpreted entry.
void Method::set_code(const methodHandle& mh, nmethod *code) {
  assert_lock_strong(NMethodState_lock);
  assert( code, "use clear_code to remove code" );
  assert( mh->check_code(), "" );

  guarantee(mh->adapter() != nullptr, "Adapter blob must already exist!");

  // These writes must happen in this order, because the interpreter will
  // directly jump to from_interpreted_entry which jumps to an i2c adapter
  // which jumps to _from_compiled_entry.
  mh->_code = code;             // Assign before allowing compiled code to exec

  int comp_level = code->comp_level();
  // In theory there could be a race here. In practice it is unlikely
  // and not worth worrying about.
  if (comp_level > mh->highest_comp_level()) {
    mh->set_highest_comp_level(comp_level);
  }

  OrderAccess::storestore();
  mh->_from_compiled_entry = code->verified_entry_point();
  OrderAccess::storestore();

  if (mh->is_continuation_native_intrinsic()) {
    assert(mh->_from_interpreted_entry == nullptr, "initialized incorrectly"); // see link_method

    if (mh->is_continuation_enter_intrinsic()) {
      // This is the entry used when we're in interpreter-only mode; see InterpreterMacroAssembler::jump_from_interpreted
      mh->_i2i_entry = ContinuationEntry::interpreted_entry();
    } else if (mh->is_continuation_yield_intrinsic()) {
      mh->_i2i_entry = mh->get_i2c_entry();
    } else {
      guarantee(false, "Unknown Continuation native intrinsic");
    }
    // This must come last, as it is what's tested in LinkResolver::resolve_static_call
    AtomicAccess::release_store(&mh->_from_interpreted_entry , mh->get_i2c_entry());
  } else if (!mh->is_method_handle_intrinsic()) {
    // Instantly compiled code can execute.
    mh->_from_interpreted_entry = mh->get_i2c_entry();
  }
}
1558 assert(m->can_be_statically_bound(), "");
1559 m->set_vtable_index(Method::nonvirtual_vtable_index);
1560 m->link_method(m, CHECK_(empty));
1561
1562 if (iid == vmIntrinsics::_linkToNative) {
1563 m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1564 }
1565 if (log_is_enabled(Debug, methodhandles)) {
1566 LogTarget(Debug, methodhandles) lt;
1567 LogStream ls(lt);
1568 m->print_on(&ls);
1569 }
1570
1571 return m;
1572 }
1573
#if INCLUDE_CDS
// Relink an archived MethodHandle intrinsic method after the CDS archive is
// loaded, restoring its entry points.
void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
  if (m->adapter() != nullptr) {
    // An archived adapter exists; point compiled callers at its c2i entry.
    m->set_from_compiled_entry(m->adapter()->get_c2i_entry());
  }
  m->link_method(m, CHECK);

  if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
    // linkToNative uses the adapter's i2c entry as its interpreter entry,
    // mirroring the setup done when the intrinsic is first created.
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
}
#endif
1586
1587 Klass* Method::check_non_bcp_klass(Klass* klass) {
1588 if (klass != nullptr && klass->class_loader() != nullptr) {
1589 if (klass->is_objArray_klass())
1590 klass = ObjArrayKlass::cast(klass)->bottom_klass();
1591 return klass;
1592 }
1593 return nullptr;
1594 }
1595
1596
1597 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
2175 }
2176
// Check that this pointer is valid by checking that the vtbl pointer matches
bool Method::is_valid_method(const Method* m) {
  if (m == nullptr) {
    return false;
  } else if ((intptr_t(m) & (wordSize-1)) != 0) {
    // Quick sanity check on pointer.
    return false;
  } else if (!os::is_readable_range(m, m + 1)) {
    // Guard against wild pointers before the dereferences below.
    return false;
  } else if (m->in_aot_cache()) {
    return CppVtables::is_valid_shared_method(m);
  } else if (Metaspace::contains_non_shared(m)) {
    return has_method_vptr((const void*)m);
  } else {
    // Not in metaspace at all: cannot be a Method.
    return false;
  }
}
2194
2195 // Printing
2196
2197 #ifndef PRODUCT
2198
2199 void Method::print_on(outputStream* st) const {
2200 ResourceMark rm;
2201 assert(is_method(), "must be method");
2202 st->print_cr("%s", internal_name());
2203 st->print_cr(" - this oop: " PTR_FORMAT, p2i(this));
2204 st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr();
2205 st->print (" - constants: " PTR_FORMAT " ", p2i(constants()));
2206 constants()->print_value_on(st); st->cr();
2207 st->print (" - access: 0x%x ", access_flags().as_method_flags()); access_flags().print_on(st); st->cr();
2208 st->print (" - flags: 0x%x ", _flags.as_int()); _flags.print_on(st); st->cr();
2209 st->print (" - name: "); name()->print_value_on(st); st->cr();
2210 st->print (" - signature: "); signature()->print_value_on(st); st->cr();
2211 st->print_cr(" - max stack: %d", max_stack());
2212 st->print_cr(" - max locals: %d", max_locals());
2213 st->print_cr(" - size of params: %d", size_of_parameters());
2214 st->print_cr(" - method size: %d", method_size());
2215 if (intrinsic_id() != vmIntrinsics::_none)
2216 st->print_cr(" - intrinsic id: %d %s", vmIntrinsics::as_int(intrinsic_id()), vmIntrinsics::name_at(intrinsic_id()));
2217 if (highest_comp_level() != CompLevel_none)
2218 st->print_cr(" - highest level: %d", highest_comp_level());
2219 st->print_cr(" - vtable index: %d", _vtable_index);
2220 st->print_cr(" - i2i entry: " PTR_FORMAT, p2i(interpreter_entry()));
2221 st->print( " - adapters: ");
2222 AdapterHandlerEntry* a = ((Method*)this)->adapter();
2223 if (a == nullptr)
2224 st->print_cr(PTR_FORMAT, p2i(a));
2225 else
2226 a->print_adapter_on(st);
2227 st->print_cr(" - compiled entry " PTR_FORMAT, p2i(from_compiled_entry()));
2228 st->print_cr(" - code size: %d", code_size());
2229 if (code_size() != 0) {
2230 st->print_cr(" - code start: " PTR_FORMAT, p2i(code_base()));
2231 st->print_cr(" - code end (excl): " PTR_FORMAT, p2i(code_base() + code_size()));
2232 }
2233 if (method_data() != nullptr) {
2234 st->print_cr(" - method data: " PTR_FORMAT, p2i(method_data()));
2235 }
2236 st->print_cr(" - checked ex length: %d", checked_exceptions_length());
2237 if (checked_exceptions_length() > 0) {
2238 CheckedExceptionElement* table = checked_exceptions_start();
2239 st->print_cr(" - checked ex start: " PTR_FORMAT, p2i(table));
2240 if (Verbose) {
2241 for (int i = 0; i < checked_exceptions_length(); i++) {
2242 st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
2243 }
2244 }
2245 }
2246 if (has_linenumber_table()) {
2247 u_char* table = compressed_linenumber_table();
2277 st->print_cr(" - signature handler: " PTR_FORMAT, p2i(signature_handler()));
2278 }
2279 }
2280
2281 void Method::print_linkage_flags(outputStream* st) {
2282 access_flags().print_on(st);
2283 if (is_default_method()) {
2284 st->print("default ");
2285 }
2286 if (is_overpass()) {
2287 st->print("overpass ");
2288 }
2289 }
2290 #endif //PRODUCT
2291
// Print a short one-line description: internal name, address, name, signature,
// holder, plus extra detail in WizardMode.
void Method::print_value_on(outputStream* st) const {
  assert(is_method(), "must be method");
  st->print("%s", internal_name());
  print_address_on(st);
  st->print(" ");
  name()->print_value_on(st);
  st->print(" ");
  signature()->print_value_on(st);
  st->print(" in ");
  method_holder()->print_value_on(st);
  if (WizardMode) st->print("#%d", _vtable_index);
  if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
  if (WizardMode && code() != nullptr) st->print(" ((nmethod*)%p)", code());
}
2306
2307 // Verification
2308
2309 void Method::verify_on(outputStream* st) {
2310 guarantee(is_method(), "object must be method");
2311 guarantee(constants()->is_constantPool(), "should be constant pool");
2312 MethodData* md = method_data();
2313 guarantee(md == nullptr ||
2314 md->is_methodData(), "should be method data");
2315 }
|
36 #include "code/debugInfoRec.hpp"
37 #include "compiler/compilationPolicy.hpp"
38 #include "gc/shared/collectedHeap.inline.hpp"
39 #include "interpreter/bytecodes.hpp"
40 #include "interpreter/bytecodeStream.hpp"
41 #include "interpreter/bytecodeTracer.hpp"
42 #include "interpreter/interpreter.hpp"
43 #include "interpreter/oopMapCache.hpp"
44 #include "logging/log.hpp"
45 #include "logging/logStream.hpp"
46 #include "logging/logTag.hpp"
47 #include "memory/allocation.inline.hpp"
48 #include "memory/metadataFactory.hpp"
49 #include "memory/metaspaceClosure.hpp"
50 #include "memory/oopFactory.hpp"
51 #include "memory/resourceArea.hpp"
52 #include "memory/universe.hpp"
53 #include "nmt/memTracker.hpp"
54 #include "oops/constantPool.hpp"
55 #include "oops/constMethod.hpp"
56 #include "oops/inlineKlass.inline.hpp"
57 #include "oops/jmethodIDTable.hpp"
58 #include "oops/klass.inline.hpp"
59 #include "oops/method.inline.hpp"
60 #include "oops/methodData.hpp"
61 #include "oops/objArrayKlass.hpp"
62 #include "oops/objArrayOop.inline.hpp"
63 #include "oops/oop.inline.hpp"
64 #include "oops/symbol.hpp"
65 #include "oops/trainingData.hpp"
66 #include "prims/jvmtiExport.hpp"
67 #include "prims/methodHandles.hpp"
68 #include "runtime/arguments.hpp"
69 #include "runtime/atomicAccess.hpp"
70 #include "runtime/continuationEntry.hpp"
71 #include "runtime/frame.inline.hpp"
72 #include "runtime/handles.inline.hpp"
73 #include "runtime/init.hpp"
74 #include "runtime/java.hpp"
75 #include "runtime/orderAccess.hpp"
76 #include "runtime/perfData.hpp"
151 method_data()->~MethodData();
152 }
153 }
154
155 address Method::get_i2c_entry() {
156 if (is_abstract()) {
157 return SharedRuntime::throw_AbstractMethodError_entry();
158 }
159 assert(adapter() != nullptr, "must have");
160 return adapter()->get_i2c_entry();
161 }
162
163 address Method::get_c2i_entry() {
164 if (is_abstract()) {
165 return SharedRuntime::get_handle_wrong_method_abstract_stub();
166 }
167 assert(adapter() != nullptr, "must have");
168 return adapter()->get_c2i_entry();
169 }
170
address Method::get_c2i_inline_entry() {
  // c2i adapter entry for the inline-type calling convention — presumably
  // Valhalla's scalarized-arguments variant; confirm in AdapterHandlerEntry.
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_inline_entry();
}
175
address Method::get_c2i_inline_ro_entry() {
  // "ro" variant of the inline c2i entry; exact semantics are defined by the
  // adapter — see AdapterHandlerEntry.
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_inline_ro_entry();
}
180
181 address Method::get_c2i_unverified_entry() {
182 if (is_abstract()) {
183 return SharedRuntime::get_handle_wrong_method_abstract_stub();
184 }
185 assert(adapter() != nullptr, "must have");
186 return adapter()->get_c2i_unverified_entry();
187 }
188
address Method::get_c2i_unverified_inline_entry() {
  // Unverified variant of the inline-type c2i adapter entry.
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_unverified_inline_entry();
}
193
194 address Method::get_c2i_no_clinit_check_entry() {
195 if (is_abstract()) {
196 return nullptr;
197 }
198 assert(VM_Version::supports_fast_class_init_checks(), "");
199 assert(adapter() != nullptr, "must have");
200 return adapter()->get_c2i_no_clinit_check_entry();
201 }
202
// Convenience overload: formats this method via the static variant using its
// constant pool holder, name, and signature.
char* Method::name_and_sig_as_C_string() const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
}
206
// Same as the no-argument overload, but formats into the caller-supplied
// buffer of the given size.
char* Method::name_and_sig_as_C_string(char* buf, int size) const {
  return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
}
210
211 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
212 const char* klass_name = klass->external_name();
213 int klass_name_len = (int)strlen(klass_name);
402 return code_base();
403 } else {
404 return bcp;
405 }
406 }
407
408 int Method::size(bool is_native) {
409 // If native, then include pointers for native_function and signature_handler
410 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
411 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
412 return align_metadata_size(header_size() + extra_words);
413 }
414
// Name of the class that declares this method.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
418
// Visit all metaspace pointers embedded in this Method (used by
// CDS/AOT archiving). The constMethod is pushed as writable when the
// holder is not yet rewritten or when Valhalla is enabled —
// presumably because its contents may still be modified; confirm
// against the archiving code.
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(aot)("Iter(Method): %p", this);

  if (!method_holder()->is_rewritten() || Arguments::is_valhalla_enabled()) {
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  // _name only exists in non-product builds.
  NOT_PRODUCT(it->push(&_name);)
}
432
433 #if INCLUDE_CDS
434 // Attempt to return method to original state. Clear any pointers
435 // (to objects outside the shared spaces). We won't be able to predict
436 // where they should point in a new JVM. Further initialize some
437 // entries now in order allow them to be write protected later.
438
439 void Method::remove_unshareable_info() {
440 unlink_method();
441 if (method_data() != nullptr) {
442 method_data()->remove_unshareable_info();
445 method_counters()->remove_unshareable_info();
446 }
447 if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
448 _adapter->remove_unshareable_info();
449 _adapter = nullptr;
450 }
451 JFR_ONLY(REMOVE_METHOD_ID(this);)
452 }
453
// Re-establish runtime-only state on a method loaded from the CDS/AOT
// archive: restores MethodData/MethodCounters and re-derives the
// compiled-entry fields from the (already linked) archived adapter.
// May throw via CHECK from the nested restore calls.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  if (_adapter != nullptr) {
    assert(_adapter->is_linked(), "must be");
    _from_compiled_entry = _adapter->get_c2i_entry();
    _from_compiled_inline_entry = _adapter->get_c2i_inline_entry();
    _from_compiled_inline_ro_entry = _adapter->get_c2i_inline_ro_entry();
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
470 #endif
471
472 void Method::set_vtable_index(int index) {
473 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
474 // At runtime initialize_vtable is rerun as part of link_class_impl()
475 // for a shared class loaded by the non-boot loader to obtain the loader
476 // constraints based on the runtime classloaders' context.
477 return; // don't write into the shared class
478 } else {
479 _vtable_index = index;
480 }
481 }
482
483 void Method::set_itable_index(int index) {
484 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
485 // At runtime initialize_itable is rerun as part of link_class_impl()
486 // for a shared class loaded by the non-boot loader to obtain the loader
// Atomically install 'counters' as this method's MethodCounters.
// Returns true if this call installed them; false if the field was
// already set (another thread won the race).
bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  return AtomicAccess::replace_if_null(&_method_counters, counters);
}
740
741 void Method::set_exception_handler_entered(int handler_bci) {
742 if (ProfileExceptionHandlers) {
743 MethodData* mdo = method_data();
744 if (mdo != nullptr) {
745 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
746 handler_data.set_exception_handler_entered();
747 }
748 }
749 }
750
// Extra expression-stack space: extra_stack_entries() scaled by the
// interpreter's per-element stack size.
int Method::extra_stack_words() {
  // not an inline function, to avoid a header dependency on Interpreter
  return extra_stack_entries() * Interpreter::stackElementSize;
}
755
// InlineKlass the method is declared to return. This must not
// safepoint as it is called with references live on the stack at
// locations the GC is unaware of.
InlineKlass* Method::returns_inline_type() const {
  assert(InlineTypeReturnedAsFields, "Inline types should never be returned as fields");
  if (is_native()) {
    return nullptr;
  }
  // NoSafepointVerifier enforces the no-safepoint contract stated above
  // (debug builds only).
  NoSafepointVerifier nsv;
  SignatureStream ss(signature());
  ss.skip_to_return_type();
  return ss.as_inline_klass(method_holder());
}
769
770 bool Method::compute_has_loops_flag() {
771 BytecodeStream bcs(methodHandle(Thread::current(), this));
772 Bytecodes::Code bc;
773
774 while ((bc = bcs.next()) >= 0) {
775 switch (bc) {
776 case Bytecodes::_ifeq:
777 case Bytecodes::_ifnull:
778 case Bytecodes::_iflt:
779 case Bytecodes::_ifle:
780 case Bytecodes::_ifne:
781 case Bytecodes::_ifnonnull:
782 case Bytecodes::_ifgt:
783 case Bytecodes::_ifge:
784 case Bytecodes::_if_icmpeq:
785 case Bytecodes::_if_icmpne:
786 case Bytecodes::_if_icmplt:
787 case Bytecodes::_if_icmpgt:
788 case Bytecodes::_if_icmple:
789 case Bytecodes::_if_icmpge:
898
// A trivial accessor is either a trivial getter or a trivial setter.
bool Method::is_accessor() const {
  return is_getter() || is_setter();
}
902
903 bool Method::is_getter() const {
904 if (code_size() != 5) return false;
905 if (size_of_parameters() != 1) return false;
906 if (java_code_at(0) != Bytecodes::_aload_0) return false;
907 if (java_code_at(1) != Bytecodes::_getfield) return false;
908 switch (java_code_at(4)) {
909 case Bytecodes::_ireturn:
910 case Bytecodes::_lreturn:
911 case Bytecodes::_freturn:
912 case Bytecodes::_dreturn:
913 case Bytecodes::_areturn:
914 break;
915 default:
916 return false;
917 }
918 if (has_scalarized_return()) {
919 // Don't treat this as (trivial) getter method because the
920 // inline type should be returned in a scalarized form.
921 return false;
922 }
923 return true;
924 }
925
// Trivial setter shape: aload_0; <t>load_1; putfield; return — exactly
// 6 bytes of bytecode, with the parameter count matching the loaded
// value's slot size (2 for one-slot values, 3 for long/double).
bool Method::is_setter() const {
  if (code_size() != 6) return false;
  if (java_code_at(0) != Bytecodes::_aload_0) return false;
  switch (java_code_at(1)) {
    case Bytecodes::_iload_1:
    case Bytecodes::_aload_1:
    case Bytecodes::_fload_1:
      if (size_of_parameters() != 2) return false;
      break;
    case Bytecodes::_dload_1:
    case Bytecodes::_lload_1:
      // long/double occupy two slots, so one more parameter slot total.
      if (size_of_parameters() != 3) return false;
      break;
    default:
      return false;
  }
  if (java_code_at(2) != Bytecodes::_putfield) return false;
  if (java_code_at(5) != Bytecodes::_return) return false;
  if (has_scalarized_args()) {
    // Don't treat this as (trivial) setter method because the
    // inline type argument should be passed in a scalarized form.
    return false;
  }
  return true;
}
951
952 bool Method::is_constant_getter() const {
953 int last_index = code_size() - 1;
954 // Check if the first 1-3 bytecodes are a constant push
955 // and the last bytecode is a return.
956 return (2 <= code_size() && code_size() <= 4 &&
957 Bytecodes::is_const(java_code_at(0)) &&
958 Bytecodes::length_for(java_code_at(0)) == last_index &&
959 Bytecodes::is_return(java_code_at(last_index)) &&
960 !has_scalarized_args());
961 }
962
// True for <clinit> methods that are actual static initializers.
bool Method::is_class_initializer() const {
  // For classfiles version 51 or greater, ensure that the clinit method is
  // static.  Non-static methods with the name "<clinit>" are not static
  // initializers. (older classfiles exempted for backward compatibility)
  return (name() == vmSymbols::class_initializer_name() &&
          (is_static() ||
           method_holder()->major_version() < 51));
}
971
// A method named <init> is a classic object constructor.
bool Method::is_object_constructor() const {
  return name() == vmSymbols::object_initializer_name();
}
976
// A static method needs a class-initialization barrier until its
// holder class has been initialized.
bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}
980
// Identifies java.lang.Object's wait method by holder name and
// method name.
bool Method::is_object_wait0() const {
  return klass_name() == vmSymbols::java_lang_Object()
          && name() == vmSymbols::wait_name();
}
985
986 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
987 int length = method->checked_exceptions_length();
988 if (length == 0) { // common case
989 return objArrayHandle(THREAD, Universe::the_empty_class_array());
990 } else {
991 methodHandle h_this(THREAD, method);
992 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
993 objArrayHandle mirrors (THREAD, m_oop);
994 for (int i = 0; i < length; i++) {
1017 // Not necessarily sorted and not necessarily one-to-one.
1018 CompressedLineNumberReadStream stream(compressed_linenumber_table());
1019 while (stream.read_pair()) {
1020 if (stream.bci() == bci) {
1021 // perfect match
1022 return stream.line();
1023 } else {
1024 // update best_bci/line
1025 if (stream.bci() < bci && stream.bci() >= best_bci) {
1026 best_bci = stream.bci();
1027 best_line = stream.line();
1028 }
1029 }
1030 }
1031 }
1032 return best_line;
1033 }
1034
1035
1036 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
1037 if( constants()->tag_at(klass_index).is_unresolved_klass()) {
1038 Thread *thread = Thread::current();
1039 Symbol* klass_name = constants()->klass_name_at(klass_index);
1040 Handle loader(thread, method_holder()->class_loader());
1041 return SystemDictionary::find_instance_klass(thread, klass_name, loader) != nullptr;
1042 } else {
1043 return true;
1044 }
1045 }
1046
1047
1048 bool Method::is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved) const {
1049 int klass_index = constants()->klass_ref_index_at(refinfo_index, bc);
1050 if (must_be_resolved) {
1051 // Make sure klass is resolved in constantpool.
1052 if (constants()->tag_at(klass_index).is_unresolved_klass()) {
1053 return false;
1054 }
1055 }
1056 return is_klass_loaded_by_klass_index(klass_index);
1057 }
1058
1059
1060 void Method::set_native_function(address function, bool post_event_flag) {
1061 assert(function != nullptr, "use clear_native_function to unregister natives");
1062 assert(!is_special_native_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
1063 address* native_function = native_function_addr();
1064
1065 // We can see racers trying to place the same native function into place. Once
1066 // is plenty.
1067 address current = *native_function;
1068 if (current == function) return;
1069 if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
1070 function != nullptr) {
1071 // native_method_throw_unsatisfied_link_error_entry() should only
1072 // be passed when post_event_flag is false.
1073 assert(function !=
1074 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1202
// Mark this method as not OSR-compilable at the given compilation
// level (or at all levels for CompLevel_all), optionally reporting the
// reason.
void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) {
  print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
  if (comp_level == CompLevel_all) {
    set_is_not_c1_osr_compilable();
    set_is_not_c2_osr_compilable();
  } else {
    if (is_c1_compile(comp_level))
      set_is_not_c1_osr_compilable();
    if (is_c2_compile(comp_level))
      set_is_not_c2_osr_compilable();
  }
  assert(!CompilationPolicy::can_be_osr_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
}
1216
// Revert to using the interpreter and clear out the nmethod
void Method::clear_code() {
  // this may be null if c2i adapters have not been made yet
  // Only should happen at allocate time.
  if (adapter() == nullptr) {
    _from_compiled_entry = nullptr;
    _from_compiled_inline_entry = nullptr;
    _from_compiled_inline_ro_entry = nullptr;
  } else {
    _from_compiled_entry = adapter()->get_c2i_entry();
    _from_compiled_inline_entry = adapter()->get_c2i_inline_entry();
    _from_compiled_inline_ro_entry = adapter()->get_c2i_inline_ro_entry();
  }
  // StoreStore barriers keep these publication stores ordered for
  // concurrent readers: compiled entries first, then the interpreted
  // entry, then the nmethod pointer.
  OrderAccess::storestore();
  _from_interpreted_entry = _i2i_entry;
  OrderAccess::storestore();
  _code = nullptr;
}
1235
// Revert to the interpreter only if this method's compiled state still
// refers to 'compare' (either via _code or via the compiled entry).
void Method::unlink_code(nmethod *compare) {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  // We need to check if either the _code or _from_compiled_code_entry_point
  // refer to this nmethod because there is a race in setting these two fields
  // in Method* as seen in bugid 4947125.
  if (code() == compare ||
      from_compiled_entry() == compare->verified_entry_point()) {
    clear_code();
  }
}
1246
// Unconditionally revert this method to the interpreter, under the
// NMethodState_lock (acquired here unless already held by this thread).
void Method::unlink_code() {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  clear_code();
}
1251
1252 #if INCLUDE_CDS
// Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  _code = nullptr;
  // The adapter pointer is kept only when adapters themselves are being
  // archived.
  if (!CDSConfig::is_dumping_adapters()) {
    _adapter = nullptr;
  }
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_compiled_inline_entry = nullptr;
  _from_compiled_inline_ro_entry = nullptr;
  _from_interpreted_entry = nullptr;

  // Native entry points are process-specific and cannot be archived.
  if (is_native()) {
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  // Drop profiling state and compilability verdicts so the archived
  // copy starts fresh.
  clear_method_data();
  clear_method_counters();
  clear_is_not_c1_compilable();
  clear_is_not_c1_osr_compilable();
  clear_is_not_c2_compilable();
  clear_is_not_c2_osr_compilable();
  clear_queued_for_compilation();

  remove_unshareable_flags();
}
1282
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  // NOTE(review): is_not_c1_osr_compilable is not reset here, unlike
  // its three sibling compilability flags above. unlink_method() does
  // clear it separately via clear_is_not_c1_osr_compilable() before
  // calling this — confirm the asymmetry is intentional.
  set_on_stack_flag(false);
  set_has_scalarized_args(false);
  set_has_scalarized_return(false);
}
1298 #endif
1299
// Called when the method_holder is getting linked. Setup entrypoints so the method
// is ready to be called from interpreter, compiler, and vtables.
// May throw (TRAPS) from make_adapters or from the continuation-intrinsic
// native-wrapper creation below.
void Method::link_method(const methodHandle& h_method, TRAPS) {
  if (log_is_enabled(Info, perf, class, link)) {
    ClassLoader::perf_ik_link_methods_count()->inc();
  }

  // If the code cache is full, we may reenter this function for the
  // leftover methods that weren't linked.
  if (adapter() != nullptr) {
    if (adapter()->in_aot_cache()) {
      assert(adapter()->is_linked(), "Adapter is shared but not linked");
    } else {
      // Already linked on a previous attempt; nothing more to do.
      return;
    }
  }
  assert( _code == nullptr, "nothing compiled yet" );

  // Setup interpreter entrypoint
  assert(this == h_method(), "wrong h_method()" );

  assert(adapter() == nullptr || adapter()->is_linked(), "init'd to null or restored from cache");
  address entry = Interpreter::entry_for_method(h_method);
  assert(entry != nullptr, "interpreter entry must be non-null");
  // Sets both _i2i_entry and _from_interpreted_entry
  set_interpreter_entry(entry);

  // Don't overwrite already registered native entries.
  if (is_native() && !has_native_function()) {
    set_native_function(
      SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
      !native_bind_event_is_interesting);
  }
  if (InlineTypeReturnedAsFields && returns_inline_type() && !has_scalarized_return()) {
    set_has_scalarized_return();
  }

  // Setup compiler entrypoint.  This is made eagerly, so we do not need
  // special handling of vtables.  An alternative is to make adapters more
  // lazily by calling make_adapter() from from_compiled_entry() for the
  // normal calls.  For vtable calls life gets more complicated.  When a
  // call-site goes mega-morphic we need adapters in all methods which can be
  // called from the vtable.  We need adapters on such methods that get loaded
  // later.  Ditto for mega-morphic itable calls.  If this proves to be a
  // problem we'll make these lazily later.
  if (is_abstract()) {
    // Abstract methods get the handle-wrong-method-abstract stub for all
    // three compiled entries instead of an adapter.
    address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
    h_method->_from_compiled_entry = wrong_method_abstract;
    h_method->_from_compiled_inline_entry = wrong_method_abstract;
    h_method->_from_compiled_inline_ro_entry = wrong_method_abstract;
  } else if (_adapter == nullptr) {
    (void) make_adapters(h_method, CHECK);
#ifndef ZERO
    assert(adapter()->is_linked(), "Adapter must have been linked");
#endif
    h_method->_from_compiled_entry = adapter()->get_c2i_entry();
    h_method->_from_compiled_inline_entry = adapter()->get_c2i_inline_entry();
    h_method->_from_compiled_inline_ro_entry = adapter()->get_c2i_inline_ro_entry();
  }

  // ONLY USE the h_method now as make_adapter may have blocked

  if (h_method->is_continuation_native_intrinsic()) {
    // Continuation intrinsics get a dedicated native wrapper; entries are
    // cleared here and re-established in set_code().
    _from_interpreted_entry = nullptr;
    _from_compiled_entry = nullptr;
    _i2i_entry = nullptr;
    if (Continuations::enabled()) {
      assert(!Threads::is_vm_complete(), "should only be called during vm init");
      AdapterHandlerLibrary::create_native_wrapper(h_method);
      if (!h_method->has_compiled_code()) {
        THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
      }
      assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
    }
  }
}
1376
// Create (or fetch a cached) i2c/c2i adapter for mh and install it on
// the method; returns the c2i entry. Throws OutOfMemoryError (or exits
// the VM during initialization) when the code cache has no room.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  assert(!mh->is_abstract(), "abstract methods do not have adapters");
  PerfTraceTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here.  They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  assert(!mh->has_scalarized_args() || adapter->get_sig_cc() != nullptr, "sigcc should not be null here");

  mh->set_adapter_entry(adapter);
  return adapter->get_c2i_entry();
}
1401
// The verified_code_entry() must be called when an invoke is resolved
// on this method.

// It returns the compiled code entry point, after asserting not null.
// This function is called after potential safepoints so that nmethod
// or adapter that it points to is still live and valid.
// This function must not hit a safepoint!
address Method::verified_code_entry() {
  // NoSafepointVerifier (debug-only) enforces the no-safepoint contract.
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_entry != nullptr, "must be set");
  return _from_compiled_entry;
}
1414
// Same contract as verified_code_entry(), for the inline-type calling
// convention entry. Must not safepoint.
address Method::verified_inline_code_entry() {
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_inline_entry != nullptr, "must be set");
  return _from_compiled_inline_entry;
}
1420
// Same contract as verified_code_entry(), for the inline-type "ro"
// entry. Must not safepoint.
address Method::verified_inline_ro_code_entry() {
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_inline_ro_entry != nullptr, "must be set");
  return _from_compiled_inline_ro_entry;
}
1426
// Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
// (could be racing a deopt).
// Not inline to avoid circular ref.
bool Method::check_code() const {
  // cached in a register or local.  There's a race on the value of the field.
  nmethod *code = AtomicAccess::load_acquire(&_code);
  // Valid states: no code; code being unlinked (method() == nullptr);
  // or a non-OSR nmethod whose backlink points at this method.
  return code == nullptr || (code->method() == nullptr) || (code->method() == (Method*)this && !code->is_osr_method());
}
1435
// Install compiled code.  Instantly it can execute.
// Caller must hold NMethodState_lock (asserted).
void Method::set_code(const methodHandle& mh, nmethod *code) {
  assert_lock_strong(NMethodState_lock);
  assert( code, "use clear_code to remove code" );
  assert( mh->check_code(), "" );

  guarantee(mh->adapter() != nullptr, "Adapter blob must already exist!");

  // These writes must happen in this order, because the interpreter will
  // directly jump to from_interpreted_entry which jumps to an i2c adapter
  // which jumps to _from_compiled_entry.
  mh->_code = code;             // Assign before allowing compiled code to exec

  int comp_level = code->comp_level();
  // In theory there could be a race here. In practice it is unlikely
  // and not worth worrying about.
  if (comp_level > mh->highest_comp_level()) {
    mh->set_highest_comp_level(comp_level);
  }

  // StoreStore barriers order _code, then the three compiled entries,
  // then the interpreted entry, for concurrent readers.
  OrderAccess::storestore();
  mh->_from_compiled_entry = code->verified_entry_point();
  mh->_from_compiled_inline_entry = code->verified_inline_entry_point();
  mh->_from_compiled_inline_ro_entry = code->verified_inline_ro_entry_point();
  OrderAccess::storestore();

  if (mh->is_continuation_native_intrinsic()) {
    assert(mh->_from_interpreted_entry == nullptr, "initialized incorrectly"); // see link_method

    if (mh->is_continuation_enter_intrinsic()) {
      // This is the entry used when we're in interpreter-only mode; see InterpreterMacroAssembler::jump_from_interpreted
      mh->_i2i_entry = ContinuationEntry::interpreted_entry();
    } else if (mh->is_continuation_yield_intrinsic()) {
      mh->_i2i_entry = mh->get_i2c_entry();
    } else {
      guarantee(false, "Unknown Continuation native intrinsic");
    }
    // This must come last, as it is what's tested in LinkResolver::resolve_static_call
    AtomicAccess::release_store(&mh->_from_interpreted_entry , mh->get_i2c_entry());
  } else if (!mh->is_method_handle_intrinsic()) {
    // Instantly compiled code can execute.
    mh->_from_interpreted_entry = mh->get_i2c_entry();
  }
}
1632 assert(m->can_be_statically_bound(), "");
1633 m->set_vtable_index(Method::nonvirtual_vtable_index);
1634 m->link_method(m, CHECK_(empty));
1635
1636 if (iid == vmIntrinsics::_linkToNative) {
1637 m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1638 }
1639 if (log_is_enabled(Debug, methodhandles)) {
1640 LogTarget(Debug, methodhandles) lt;
1641 LogStream ls(lt);
1642 m->print_on(&ls);
1643 }
1644
1645 return m;
1646 }
1647
1648 #if INCLUDE_CDS
// Re-link a method-handle intrinsic restored from the CDS archive:
// re-derives the compiled entries from the archived adapter (if any),
// then relinks the method. May throw via CHECK from link_method.
void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
  if (m->adapter() != nullptr) {
    m->set_from_compiled_entry(m->adapter()->get_c2i_entry());
    m->set_from_compiled_inline_entry(m->adapter()->get_c2i_inline_entry());
    m->set_from_compiled_inline_ro_entry(m->adapter()->get_c2i_inline_ro_entry());
  }
  m->link_method(m, CHECK);

  // Mirrors the special-casing of _linkToNative at intrinsic creation.
  if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
}
1661 #endif
1662
1663 Klass* Method::check_non_bcp_klass(Klass* klass) {
1664 if (klass != nullptr && klass->class_loader() != nullptr) {
1665 if (klass->is_objArray_klass())
1666 klass = ObjArrayKlass::cast(klass)->bottom_klass();
1667 return klass;
1668 }
1669 return nullptr;
1670 }
1671
1672
1673 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
2251 }
2252
// Check that this pointer is valid by checking that the vtbl pointer matches
bool Method::is_valid_method(const Method* m) {
  if (m == nullptr) {
    return false;
  } else if ((intptr_t(m) & (wordSize-1)) != 0) {
    // Quick sanity check on pointer.
    // Method* must be word-aligned; anything else cannot be one.
    return false;
  } else if (!os::is_readable_range(m, m + 1)) {
    // Pointer must reference readable memory before we dereference it.
    return false;
  } else if (m->in_aot_cache()) {
    // Shared methods are validated against the archived C++ vtables.
    return CppVtables::is_valid_shared_method(m);
  } else if (Metaspace::contains_non_shared(m)) {
    return has_method_vptr((const void*)m);
  } else {
    // Outside metaspace entirely: not a Method.
    return false;
  }
}
2270
// Returns true if argument 'idx' sits inside an inline-type wrapper
// (a T_METADATA ... T_VOID bracket) of the adapter's compiled
// calling-convention signature, i.e. is passed in scalarized form.
bool Method::is_scalarized_arg(int idx) const {
  if (!has_scalarized_args()) {
    return false;
  }
  // Search through signature and check if argument is wrapped in T_METADATA/T_VOID
  int depth = 0;
  const GrowableArray<SigEntry>* sig = adapter()->get_sig_cc();
  for (int i = 0; i < sig->length(); i++) {
    BasicType bt = sig->at(i)._bt;
    if (bt == T_METADATA) {
      depth++;
    }
    if (idx == 0) {
      break; // Argument found
    }
    // A T_VOID that is not the second slot of a long/double closes a
    // T_METADATA bracket.
    if (bt == T_VOID && (sig->at(i-1)._bt != T_LONG && sig->at(i-1)._bt != T_DOUBLE)) {
      depth--;
    }
    if (depth == 0 && bt != T_LONG && bt != T_DOUBLE) {
      idx--; // Advance to next argument
    }
  }
  // depth != 0 means we stopped while still inside a wrapper.
  return depth != 0;
}
2295
// Returns true if argument 'idx' is a root inline-type value flagged
// with _vt_oop in the compiled calling-convention signature
// (presumably passed as a buffered oop — confirm against SigEntry).
bool Method::is_scalarized_buffer_arg(int idx) const {
  if (!has_scalarized_args()) {
    return false;
  }
  // Search through signature and check if argument is wrapped in T_METADATA/T_VOID
  int depth = 0;
  const GrowableArray<SigEntry>* sig = adapter()->get_sig_cc();
  for (int i = 0; i < sig->length(); i++) {
    BasicType bt = sig->at(i)._bt;
    if (bt == T_METADATA) {
      depth++;
      continue;
    }
    // A T_VOID that is not the second slot of a long/double closes a
    // T_METADATA bracket.
    if (bt == T_VOID && (sig->at(i-1)._bt != T_LONG && sig->at(i-1)._bt != T_DOUBLE)) {
      depth--;
      continue;
    }
    if (idx == 0) {
      if (sig->at(i)._vt_oop) {
        assert(depth == 1, "only for root value");
        return true;
      }
      break; // Argument found
    }
    idx--; // Advance to next argument
  }
  return false;
}
2324
2325 // Printing
2326
2327 #ifndef PRODUCT
2328
2329 void Method::print_on(outputStream* st) const {
2330 ResourceMark rm;
2331 assert(is_method(), "must be method");
2332 st->print_cr("%s", internal_name());
2333 st->print_cr(" - this oop: " PTR_FORMAT, p2i(this));
2334 st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr();
2335 st->print (" - constants: " PTR_FORMAT " ", p2i(constants()));
2336 constants()->print_value_on(st); st->cr();
2337 st->print (" - access: 0x%x ", access_flags().as_method_flags()); access_flags().print_on(st); st->cr();
2338 st->print (" - flags: 0x%x ", _flags.as_int()); _flags.print_on(st); st->cr();
2339 st->print (" - name: "); name()->print_value_on(st); st->cr();
2340 st->print (" - signature: "); signature()->print_value_on(st); st->cr();
2341 st->print_cr(" - max stack: %d", max_stack());
2342 st->print_cr(" - max locals: %d", max_locals());
2343 st->print_cr(" - size of params: %d", size_of_parameters());
2344 st->print_cr(" - method size: %d", method_size());
2345 if (intrinsic_id() != vmIntrinsics::_none)
2346 st->print_cr(" - intrinsic id: %d %s", vmIntrinsics::as_int(intrinsic_id()), vmIntrinsics::name_at(intrinsic_id()));
2347 if (highest_comp_level() != CompLevel_none)
2348 st->print_cr(" - highest level: %d", highest_comp_level());
2349 st->print_cr(" - vtable index: %d", _vtable_index);
2350 #ifdef ASSERT
2351 if (valid_itable_index())
2352 st->print_cr(" - itable index: %d", itable_index());
2353 #endif
2354 st->print_cr(" - i2i entry: " PTR_FORMAT, p2i(interpreter_entry()));
2355 st->print( " - adapters: ");
2356 AdapterHandlerEntry* a = ((Method*)this)->adapter();
2357 if (a == nullptr)
2358 st->print_cr(PTR_FORMAT, p2i(a));
2359 else
2360 a->print_adapter_on(st);
2361 st->print_cr(" - compiled entry " PTR_FORMAT, p2i(from_compiled_entry()));
2362 st->print_cr(" - compiled inline entry " PTR_FORMAT, p2i(from_compiled_inline_entry()));
2363 st->print_cr(" - compiled inline ro entry " PTR_FORMAT, p2i(from_compiled_inline_ro_entry()));
2364 st->print_cr(" - code size: %d", code_size());
2365 if (code_size() != 0) {
2366 st->print_cr(" - code start: " PTR_FORMAT, p2i(code_base()));
2367 st->print_cr(" - code end (excl): " PTR_FORMAT, p2i(code_base() + code_size()));
2368 }
2369 if (method_data() != nullptr) {
2370 st->print_cr(" - method data: " PTR_FORMAT, p2i(method_data()));
2371 }
2372 st->print_cr(" - checked ex length: %d", checked_exceptions_length());
2373 if (checked_exceptions_length() > 0) {
2374 CheckedExceptionElement* table = checked_exceptions_start();
2375 st->print_cr(" - checked ex start: " PTR_FORMAT, p2i(table));
2376 if (Verbose) {
2377 for (int i = 0; i < checked_exceptions_length(); i++) {
2378 st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
2379 }
2380 }
2381 }
2382 if (has_linenumber_table()) {
2383 u_char* table = compressed_linenumber_table();
2413 st->print_cr(" - signature handler: " PTR_FORMAT, p2i(signature_handler()));
2414 }
2415 }
2416
// Print the access flags plus the linkage-relevant pseudo-flags
// ("default ", "overpass ") to the given stream.
void Method::print_linkage_flags(outputStream* st) {
  access_flags().print_on(st);
  if (is_default_method()) {
    st->print("default ");
  }
  if (is_overpass()) {
    st->print("overpass ");
  }
}
2426 #endif //PRODUCT
2427
// One-line summary: internal name, address, method name, signature and
// holder; WizardMode adds flags, vtable index, parameter/local counts
// and the nmethod pointer.
void Method::print_value_on(outputStream* st) const {
  assert(is_method(), "must be method");
  st->print("%s", internal_name());
  print_address_on(st);
  st->print(" ");
  if (WizardMode) access_flags().print_on(st);
  name()->print_value_on(st);
  st->print(" ");
  signature()->print_value_on(st);
  st->print(" in ");
  method_holder()->print_value_on(st);
  if (WizardMode) st->print("#%d", _vtable_index);
  if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
  if (WizardMode && code() != nullptr) st->print(" ((nmethod*)%p)", code());
}
2443
2444 // Verification
2445
// Sanity-check this Method's metadata invariants: it is a Method, its
// constant pool is a ConstantPool, and any MethodData is a MethodData.
void Method::verify_on(outputStream* st) {
  guarantee(is_method(), "object must be method");
  guarantee(constants()->is_constantPool(), "should be constant pool");
  MethodData* md = method_data();
  guarantee(md == nullptr ||
          md->is_methodData(), "should be method data");
}
|