36 #include "code/debugInfoRec.hpp"
37 #include "compiler/compilationPolicy.hpp"
38 #include "gc/shared/collectedHeap.inline.hpp"
39 #include "interpreter/bytecodes.hpp"
40 #include "interpreter/bytecodeStream.hpp"
41 #include "interpreter/bytecodeTracer.hpp"
42 #include "interpreter/interpreter.hpp"
43 #include "interpreter/oopMapCache.hpp"
44 #include "logging/log.hpp"
45 #include "logging/logStream.hpp"
46 #include "logging/logTag.hpp"
47 #include "memory/allocation.inline.hpp"
48 #include "memory/metadataFactory.hpp"
49 #include "memory/metaspaceClosure.hpp"
50 #include "memory/oopFactory.hpp"
51 #include "memory/resourceArea.hpp"
52 #include "memory/universe.hpp"
53 #include "nmt/memTracker.hpp"
54 #include "oops/constantPool.hpp"
55 #include "oops/constMethod.hpp"
56 #include "oops/jmethodIDTable.hpp"
57 #include "oops/klass.inline.hpp"
58 #include "oops/method.inline.hpp"
59 #include "oops/methodData.hpp"
60 #include "oops/objArrayKlass.hpp"
61 #include "oops/objArrayOop.inline.hpp"
62 #include "oops/oop.inline.hpp"
63 #include "oops/symbol.hpp"
64 #include "oops/trainingData.hpp"
65 #include "prims/jvmtiExport.hpp"
66 #include "prims/methodHandles.hpp"
67 #include "runtime/arguments.hpp"
68 #include "runtime/atomicAccess.hpp"
69 #include "runtime/continuationEntry.hpp"
70 #include "runtime/frame.inline.hpp"
71 #include "runtime/handles.inline.hpp"
72 #include "runtime/init.hpp"
73 #include "runtime/java.hpp"
74 #include "runtime/orderAccess.hpp"
75 #include "runtime/perfData.hpp"
150 method_data()->~MethodData();
151 }
152 }
153
154 address Method::get_i2c_entry() {
155 if (is_abstract()) {
156 return SharedRuntime::throw_AbstractMethodError_entry();
157 }
158 assert(adapter() != nullptr, "must have");
159 return adapter()->get_i2c_entry();
160 }
161
162 address Method::get_c2i_entry() {
163 if (is_abstract()) {
164 return SharedRuntime::get_handle_wrong_method_abstract_stub();
165 }
166 assert(adapter() != nullptr, "must have");
167 return adapter()->get_c2i_entry();
168 }
169
170 address Method::get_c2i_unverified_entry() {
171 if (is_abstract()) {
172 return SharedRuntime::get_handle_wrong_method_abstract_stub();
173 }
174 assert(adapter() != nullptr, "must have");
175 return adapter()->get_c2i_unverified_entry();
176 }
177
178 address Method::get_c2i_no_clinit_check_entry() {
179 if (is_abstract()) {
180 return nullptr;
181 }
182 assert(VM_Version::supports_fast_class_init_checks(), "");
183 assert(adapter() != nullptr, "must have");
184 return adapter()->get_c2i_no_clinit_check_entry();
185 }
186
187 char* Method::name_and_sig_as_C_string(bool use_double_colon) const {
188 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), use_double_colon);
189 }
190
191 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
192 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
193 }
194
195 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, bool use_double_colon) {
196 const char* klass_name = klass->external_name();
197 int klass_name_len = (int)strlen(klass_name);
392 return code_base();
393 } else {
394 return bcp;
395 }
396 }
397
398 int Method::size(bool is_native) {
399 // If native, then include pointers for native_function and signature_handler
400 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
401 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
402 return align_metadata_size(header_size() + extra_words);
403 }
404
// Name of this method's holder class.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
408
// Visit all metaspace pointers embedded in this Method, for archiving/relocation.
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(aot)("Iter(Method): %p", this);

  // If the holder has not been rewritten yet, the ConstMethod will still be
  // modified (bytecode rewriting), so it must stay writable in the archive.
  if (!method_holder()->is_rewritten()) {
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  NOT_PRODUCT(it->push(&_name);)
}
422
423 #if INCLUDE_CDS
424 // Attempt to return method to original state. Clear any pointers
425 // (to objects outside the shared spaces). We won't be able to predict
426 // where they should point in a new JVM. Further initialize some
427 // entries now in order allow them to be write protected later.
428
429 void Method::remove_unshareable_info() {
430 unlink_method();
431 if (method_data() != nullptr) {
432 method_data()->remove_unshareable_info();
435 method_counters()->remove_unshareable_info();
436 }
437 if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
438 _adapter->remove_unshareable_info();
439 _adapter = nullptr;
440 }
441 JFR_ONLY(REMOVE_METHOD_ID(this);)
442 }
443
// Re-initialize the parts of this archived Method that were cleared by
// remove_unshareable_info(). CHECK propagates any pending exception from
// the sub-restores immediately.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  if (_adapter != nullptr) {
    // An archived adapter was kept; re-derive the compiled entry from it.
    assert(_adapter->is_linked(), "must be");
    _from_compiled_entry = _adapter->get_c2i_entry();
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
458 #endif
459
// Set the vtable index, except for mapped read-only AOT-cached methods of
// classes verified at dump time, whose vtable state must not be overwritten.
void Method::set_vtable_index(int index) {
  if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
    // At runtime initialize_vtable is rerun as part of link_class_impl()
    // for a shared class loaded by the non-boot loader to obtain the loader
    // constraints based on the runtime classloaders' context.
    return; // don't write into the shared class
  } else {
    _vtable_index = index;
  }
}
470
471 void Method::set_itable_index(int index) {
472 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
473 // At runtime initialize_itable is rerun as part of link_class_impl()
474 // for a shared class loaded by the non-boot loader to obtain the loader
bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  // Atomic CAS: a racing thread may win, in which case this returns false
  // and the caller's `counters` object is not installed.
  return AtomicAccess::replace_if_null(&_method_counters, counters);
}
728
729 void Method::set_exception_handler_entered(int handler_bci) {
730 if (ProfileExceptionHandlers) {
731 MethodData* mdo = method_data();
732 if (mdo != nullptr) {
733 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
734 handler_data.set_exception_handler_entered();
735 }
736 }
737 }
738
// Extra expression-stack space, in interpreter stack-element units.
int Method::extra_stack_words() {
  // not an inline function, to avoid a header dependency on Interpreter
  return extra_stack_entries() * Interpreter::stackElementSize;
}
743
744 bool Method::compute_has_loops_flag() {
745 BytecodeStream bcs(methodHandle(Thread::current(), this));
746 Bytecodes::Code bc;
747
748 while ((bc = bcs.next()) >= 0) {
749 switch (bc) {
750 case Bytecodes::_ifeq:
751 case Bytecodes::_ifnull:
752 case Bytecodes::_iflt:
753 case Bytecodes::_ifle:
754 case Bytecodes::_ifne:
755 case Bytecodes::_ifnonnull:
756 case Bytecodes::_ifgt:
757 case Bytecodes::_ifge:
758 case Bytecodes::_if_icmpeq:
759 case Bytecodes::_if_icmpne:
760 case Bytecodes::_if_icmplt:
761 case Bytecodes::_if_icmpgt:
762 case Bytecodes::_if_icmple:
763 case Bytecodes::_if_icmpge:
872
873 bool Method::is_accessor() const {
874 return is_getter() || is_setter();
875 }
876
877 bool Method::is_getter() const {
878 if (code_size() != 5) return false;
879 if (size_of_parameters() != 1) return false;
880 if (java_code_at(0) != Bytecodes::_aload_0) return false;
881 if (java_code_at(1) != Bytecodes::_getfield) return false;
882 switch (java_code_at(4)) {
883 case Bytecodes::_ireturn:
884 case Bytecodes::_lreturn:
885 case Bytecodes::_freturn:
886 case Bytecodes::_dreturn:
887 case Bytecodes::_areturn:
888 break;
889 default:
890 return false;
891 }
892 return true;
893 }
894
895 bool Method::is_setter() const {
896 if (code_size() != 6) return false;
897 if (java_code_at(0) != Bytecodes::_aload_0) return false;
898 switch (java_code_at(1)) {
899 case Bytecodes::_iload_1:
900 case Bytecodes::_aload_1:
901 case Bytecodes::_fload_1:
902 if (size_of_parameters() != 2) return false;
903 break;
904 case Bytecodes::_dload_1:
905 case Bytecodes::_lload_1:
906 if (size_of_parameters() != 3) return false;
907 break;
908 default:
909 return false;
910 }
911 if (java_code_at(2) != Bytecodes::_putfield) return false;
912 if (java_code_at(5) != Bytecodes::_return) return false;
913 return true;
914 }
915
916 bool Method::is_constant_getter() const {
917 int last_index = code_size() - 1;
918 // Check if the first 1-3 bytecodes are a constant push
919 // and the last bytecode is a return.
920 return (2 <= code_size() && code_size() <= 4 &&
921 Bytecodes::is_const(java_code_at(0)) &&
922 Bytecodes::length_for(java_code_at(0)) == last_index &&
923 Bytecodes::is_return(java_code_at(last_index)));
924 }
925
926 bool Method::has_valid_initializer_flags() const {
927 return (is_static() ||
928 method_holder()->major_version() < 51);
929 }
930
931 bool Method::is_static_initializer() const {
932 // For classfiles version 51 or greater, ensure that the clinit method is
933 // static. Non-static methods with the name "<clinit>" are not static
934 // initializers. (older classfiles exempted for backward compatibility)
935 return name() == vmSymbols::class_initializer_name() &&
936 has_valid_initializer_flags();
937 }
938
// True for constructors, i.e. methods named <init>.
bool Method::is_object_initializer() const {
  return name() == vmSymbols::object_initializer_name();
}
942
// A static method needs a class-initialization barrier as long as its
// holder has not finished initialization.
bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}
946
947 bool Method::is_object_wait0() const {
948 return klass_name() == vmSymbols::java_lang_Object()
949 && name() == vmSymbols::wait_name();
950 }
951
952 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
953 int length = method->checked_exceptions_length();
954 if (length == 0) { // common case
955 return objArrayHandle(THREAD, Universe::the_empty_class_array());
956 } else {
957 methodHandle h_this(THREAD, method);
958 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
959 objArrayHandle mirrors (THREAD, m_oop);
960 for (int i = 0; i < length; i++) {
983 // Not necessarily sorted and not necessarily one-to-one.
984 CompressedLineNumberReadStream stream(compressed_linenumber_table());
985 while (stream.read_pair()) {
986 if (stream.bci() == bci) {
987 // perfect match
988 return stream.line();
989 } else {
990 // update best_bci/line
991 if (stream.bci() < bci && stream.bci() >= best_bci) {
992 best_bci = stream.bci();
993 best_line = stream.line();
994 }
995 }
996 }
997 }
998 return best_line;
999 }
1000
1001
1002 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
1003 if( constants()->tag_at(klass_index).is_unresolved_klass() ) {
1004 Thread *thread = Thread::current();
1005 Symbol* klass_name = constants()->klass_name_at(klass_index);
1006 Handle loader(thread, method_holder()->class_loader());
1007 return SystemDictionary::find_instance_klass(thread, klass_name, loader) != nullptr;
1008 } else {
1009 return true;
1010 }
1011 }
1012
1013
1014 bool Method::is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved) const {
1015 int klass_index = constants()->klass_ref_index_at(refinfo_index, bc);
1016 if (must_be_resolved) {
1017 // Make sure klass is resolved in constantpool.
1018 if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
1019 }
1020 return is_klass_loaded_by_klass_index(klass_index);
1021 }
1022
1023
1024 void Method::set_native_function(address function, bool post_event_flag) {
1025 assert(function != nullptr, "use clear_native_function to unregister natives");
1026 assert(!is_special_native_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
1027 address* native_function = native_function_addr();
1028
1029 // We can see racers trying to place the same native function into place. Once
1030 // is plenty.
1031 address current = *native_function;
1032 if (current == function) return;
1033 if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
1034 function != nullptr) {
1035 // native_method_throw_unsatisfied_link_error_entry() should only
1036 // be passed when post_event_flag is false.
1037 assert(function !=
1038 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1166
// Mark this method not OSR-compilable at the given tier (or all tiers for
// CompLevel_all), optionally reporting the reason.
void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) {
  print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
  if (comp_level == CompLevel_all) {
    set_is_not_c1_osr_compilable();
    set_is_not_c2_osr_compilable();
  } else {
    if (is_c1_compile(comp_level))
      set_is_not_c1_osr_compilable();
    if (is_c2_compile(comp_level))
      set_is_not_c2_osr_compilable();
  }
  assert(!CompilationPolicy::can_be_osr_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
}
1180
// Revert to using the interpreter and clear out the nmethod
void Method::clear_code() {
  // this may be null if c2i adapters have not been made yet
  // Only should happen at allocate time.
  if (adapter() == nullptr) {
    _from_compiled_entry = nullptr;
  } else {
    _from_compiled_entry = adapter()->get_c2i_entry();
  }
  // Ordering matters: redirect the compiled entry to the c2i adapter before
  // the interpreted entry, and only then drop the nmethod reference, so
  // concurrent callers never jump through a stale path.
  OrderAccess::storestore();
  _from_interpreted_entry = _i2i_entry;
  OrderAccess::storestore();
  _code = nullptr;
}
1195
// Detach this method from `compare` if that nmethod is currently installed,
// taking NMethodState_lock unless the caller already holds it.
void Method::unlink_code(nmethod *compare) {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  // We need to check if either the _code or _from_compiled_code_entry_point
  // refer to this nmethod because there is a race in setting these two fields
  // in Method* as seen in bugid 4947125.
  if (code() == compare ||
      from_compiled_entry() == compare->verified_entry_point()) {
    clear_code();
  }
}
1206
// Unconditionally detach any installed nmethod, under NMethodState_lock
// (taken here unless the caller already holds it).
void Method::unlink_code() {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  clear_code();
}
1211
1212 #if INCLUDE_CDS
1213 // Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  // Drop the installed nmethod and (unless adapters are archived) the adapter.
  _code = nullptr;
  if (!CDSConfig::is_dumping_adapters()) {
    _adapter = nullptr;
  }
  // Entry points are process-specific code addresses; they are regenerated
  // by link_method() at runtime.
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_interpreted_entry = nullptr;

  if (is_native()) {
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  // Reset profiling and compilability state so the archived method starts fresh.
  clear_method_data();
  clear_method_counters();
  clear_is_not_c1_compilable();
  clear_is_not_c1_osr_compilable();
  clear_is_not_c2_compilable();
  clear_is_not_c2_osr_compilable();
  clear_queued_for_compilation();

  remove_unshareable_flags();
}
1240
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  set_on_stack_flag(false);
}
1254 #endif
1255
1256 // Called when the method_holder is getting linked. Setup entrypoints so the method
1257 // is ready to be called from interpreter, compiler, and vtables.
1258 void Method::link_method(const methodHandle& h_method, TRAPS) {
1259 if (log_is_enabled(Info, perf, class, link)) {
1260 ClassLoader::perf_ik_link_methods_count()->inc();
1261 }
1262
1263 // If the code cache is full, we may reenter this function for the
1264 // leftover methods that weren't linked.
1265 if (adapter() != nullptr) {
1266 if (adapter()->in_aot_cache()) {
1267 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1268 } else {
1269 return;
1270 }
1271 }
1272 assert( _code == nullptr, "nothing compiled yet" );
1278 address entry = Interpreter::entry_for_method(h_method);
1279 assert(entry != nullptr, "interpreter entry must be non-null");
1280 // Sets both _i2i_entry and _from_interpreted_entry
1281 set_interpreter_entry(entry);
1282
1283 // Don't overwrite already registered native entries.
1284 if (is_native() && !has_native_function()) {
1285 set_native_function(
1286 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1287 !native_bind_event_is_interesting);
1288 }
1289
1290 // Setup compiler entrypoint. This is made eagerly, so we do not need
1291 // special handling of vtables. An alternative is to make adapters more
1292 // lazily by calling make_adapter() from from_compiled_entry() for the
1293 // normal calls. For vtable calls life gets more complicated. When a
1294 // call-site goes mega-morphic we need adapters in all methods which can be
1295 // called from the vtable. We need adapters on such methods that get loaded
1296 // later. Ditto for mega-morphic itable calls. If this proves to be a
1297 // problem we'll make these lazily later.
1298 if (is_abstract()) {
1299 h_method->_from_compiled_entry = SharedRuntime::get_handle_wrong_method_abstract_stub();
1300 } else if (_adapter == nullptr) {
1301 (void) make_adapters(h_method, CHECK);
1302 #ifndef ZERO
1303 assert(adapter()->is_linked(), "Adapter must have been linked");
1304 #endif
1305 h_method->_from_compiled_entry = adapter()->get_c2i_entry();
1306 }
1307
1308 // ONLY USE the h_method now as make_adapter may have blocked
1309
1310 if (h_method->is_continuation_native_intrinsic()) {
1311 _from_interpreted_entry = nullptr;
1312 _from_compiled_entry = nullptr;
1313 _i2i_entry = nullptr;
1314 if (Continuations::enabled()) {
1315 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1316 AdapterHandlerLibrary::create_native_wrapper(h_method);
1317 if (!h_method->has_compiled_code()) {
1318 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1319 }
1320 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1321 }
1322 }
1323 }
1324
// Create (or fetch the shared) i2c/c2i adapter entry for mh and install it.
// Returns the c2i entry. On code-cache exhaustion, exits the VM during
// startup or throws OutOfMemoryError afterwards.
address Method::make_adapters(const methodHandle& mh, TRAPS) {
  assert(!mh->is_abstract(), "abstract methods do not have adapters");
  PerfTraceTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG_NULL(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  mh->set_adapter_entry(adapter);
  return adapter->get_c2i_entry();
}
1347
1348 // The verified_code_entry() must be called when a invoke is resolved
1349 // on this method.
1350
1351 // It returns the compiled code entry point, after asserting not null.
1352 // This function is called after potential safepoints so that nmethod
1353 // or adapter that it points to is still live and valid.
1354 // This function must not hit a safepoint!
address Method::verified_code_entry() {
  // Safepoint-free by contract (see comment above); the debug-only verifier
  // enforces that no safepoint can invalidate the returned entry.
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_entry != nullptr, "must be set");
  return _from_compiled_entry;
}
1360
1361 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
1362 // (could be racing a deopt).
1363 // Not inline to avoid circular ref.
bool Method::check_code() const {
  // cached in a register or local. There's a race on the value of the field.
  // Acquire-load pairs with the releasing stores done when installing code.
  nmethod *code = AtomicAccess::load_acquire(&_code);
  // Valid states: no code; code being unloaded (method() already cleared);
  // or a non-OSR nmethod whose backlink points at this method.
  return code == nullptr || (code->method() == nullptr) || (code->method() == (Method*)this && !code->is_osr_method());
}
1369
1370 // Install compiled code. Instantly it can execute.
// Install `code` as this method's compiled code; callers may execute it as
// soon as the entry points below are published. Caller holds NMethodState_lock.
void Method::set_code(const methodHandle& mh, nmethod *code) {
  assert_lock_strong(NMethodState_lock);
  assert( code, "use clear_code to remove code" );
  assert( mh->check_code(), "" );

  guarantee(mh->adapter() != nullptr, "Adapter blob must already exist!");

  // These writes must happen in this order, because the interpreter will
  // directly jump to from_interpreted_entry which jumps to an i2c adapter
  // which jumps to _from_compiled_entry.
  mh->_code = code; // Assign before allowing compiled code to exec

  int comp_level = code->comp_level();
  // In theory there could be a race here. In practice it is unlikely
  // and not worth worrying about.
  if (comp_level > mh->highest_comp_level()) {
    mh->set_highest_comp_level(comp_level);
  }

  OrderAccess::storestore();
  mh->_from_compiled_entry = code->verified_entry_point();
  OrderAccess::storestore();

  if (mh->is_continuation_native_intrinsic()) {
    assert(mh->_from_interpreted_entry == nullptr, "initialized incorrectly"); // see link_method

    if (mh->is_continuation_enter_intrinsic()) {
      // This is the entry used when we're in interpreter-only mode; see InterpreterMacroAssembler::jump_from_interpreted
      mh->_i2i_entry = ContinuationEntry::interpreted_entry();
    } else if (mh->is_continuation_yield_intrinsic()) {
      mh->_i2i_entry = mh->get_i2c_entry();
    } else {
      guarantee(false, "Unknown Continuation native intrinsic");
    }
    // This must come last, as it is what's tested in LinkResolver::resolve_static_call
    AtomicAccess::release_store(&mh->_from_interpreted_entry , mh->get_i2c_entry());
  } else if (!mh->is_method_handle_intrinsic()) {
    // Instantly compiled code can execute.
    mh->_from_interpreted_entry = mh->get_i2c_entry();
  }
}
1564 assert(m->can_be_statically_bound(), "");
1565 m->set_vtable_index(Method::nonvirtual_vtable_index);
1566 m->link_method(m, CHECK_(empty));
1567
1568 if (iid == vmIntrinsics::_linkToNative) {
1569 m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1570 }
1571 if (log_is_enabled(Debug, methodhandles)) {
1572 LogTarget(Debug, methodhandles) lt;
1573 LogStream ls(lt);
1574 m->print_on(&ls);
1575 }
1576
1577 return m;
1578 }
1579
1580 #if INCLUDE_CDS
// Re-link an archived method-handle intrinsic at runtime: restore the
// compiled entry from an archived adapter (if any), then relink and, for
// linkToNative, point the interpreter entry at the i2c adapter.
void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
  if (m->adapter() != nullptr) {
    m->set_from_compiled_entry(m->adapter()->get_c2i_entry());
  }
  m->link_method(m, CHECK);

  if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
}
1591 #endif
1592
1593 Klass* Method::check_non_bcp_klass(Klass* klass) {
1594 if (klass != nullptr && klass->class_loader() != nullptr) {
1595 if (klass->is_objArray_klass())
1596 klass = ObjArrayKlass::cast(klass)->bottom_klass();
1597 return klass;
1598 }
1599 return nullptr;
1600 }
1601
1602
1603 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
2181 }
2182
2183 // Check that this pointer is valid by checking that the vtbl pointer matches
2184 bool Method::is_valid_method(const Method* m) {
2185 if (m == nullptr) {
2186 return false;
2187 } else if ((intptr_t(m) & (wordSize-1)) != 0) {
2188 // Quick sanity check on pointer.
2189 return false;
2190 } else if (!os::is_readable_range(m, m + 1)) {
2191 return false;
2192 } else if (m->in_aot_cache()) {
2193 return CppVtables::is_valid_shared_method(m);
2194 } else if (Metaspace::contains_non_shared(m)) {
2195 return has_method_vptr((const void*)m);
2196 } else {
2197 return false;
2198 }
2199 }
2200
2201 // Printing
2202
2203 #ifndef PRODUCT
2204
2205 void Method::print_on(outputStream* st) const {
2206 ResourceMark rm;
2207 assert(is_method(), "must be method");
2208 st->print_cr("%s", internal_name());
2209 st->print_cr(" - this oop: " PTR_FORMAT, p2i(this));
2210 st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr();
2211 st->print (" - constants: " PTR_FORMAT " ", p2i(constants()));
2212 constants()->print_value_on(st); st->cr();
2213 st->print (" - access: 0x%x ", access_flags().as_method_flags()); print_access_flags(st); st->cr();
2214 st->print (" - flags: 0x%x ", _flags.as_int()); _flags.print_on(st); st->cr();
2215 st->print (" - name: "); name()->print_value_on(st); st->cr();
2216 st->print (" - signature: "); signature()->print_value_on(st); st->cr();
2217 st->print_cr(" - max stack: %d", max_stack());
2218 st->print_cr(" - max locals: %d", max_locals());
2219 st->print_cr(" - size of params: %d", size_of_parameters());
2220 st->print_cr(" - method size: %d", method_size());
2221 if (intrinsic_id() != vmIntrinsics::_none)
2222 st->print_cr(" - intrinsic id: %d %s", vmIntrinsics::as_int(intrinsic_id()), vmIntrinsics::name_at(intrinsic_id()));
2223 if (highest_comp_level() != CompLevel_none)
2224 st->print_cr(" - highest level: %d", highest_comp_level());
2225 st->print_cr(" - vtable index: %d", _vtable_index);
2226 st->print_cr(" - i2i entry: " PTR_FORMAT, p2i(interpreter_entry()));
2227 st->print( " - adapters: ");
2228 AdapterHandlerEntry* a = ((Method*)this)->adapter();
2229 if (a == nullptr)
2230 st->print_cr(PTR_FORMAT, p2i(a));
2231 else
2232 a->print_adapter_on(st);
2233 st->print_cr(" - compiled entry " PTR_FORMAT, p2i(from_compiled_entry()));
2234 st->print_cr(" - code size: %d", code_size());
2235 if (code_size() != 0) {
2236 st->print_cr(" - code start: " PTR_FORMAT, p2i(code_base()));
2237 st->print_cr(" - code end (excl): " PTR_FORMAT, p2i(code_base() + code_size()));
2238 }
2239 if (method_data() != nullptr) {
2240 st->print_cr(" - method data: " PTR_FORMAT, p2i(method_data()));
2241 }
2242 st->print_cr(" - checked ex length: %d", checked_exceptions_length());
2243 if (checked_exceptions_length() > 0) {
2244 CheckedExceptionElement* table = checked_exceptions_start();
2245 st->print_cr(" - checked ex start: " PTR_FORMAT, p2i(table));
2246 if (Verbose) {
2247 for (int i = 0; i < checked_exceptions_length(); i++) {
2248 st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
2249 }
2250 }
2251 }
2252 if (has_linenumber_table()) {
2253 u_char* table = compressed_linenumber_table();
2292 if (is_overpass()) {
2293 st->print("overpass ");
2294 }
2295 }
2296 #endif //PRODUCT
2297
// Print this method's access flags as space-separated keywords (trailing space).
void Method::print_access_flags(outputStream* st) const {
  AccessFlags flags = access_flags();
  if (flags.is_public ()) st->print("public ");
  if (flags.is_private ()) st->print("private ");
  if (flags.is_protected ()) st->print("protected ");
  if (flags.is_static ()) st->print("static ");
  if (flags.is_final ()) st->print("final ");
  if (flags.is_synchronized()) st->print("synchronized ");
  if (flags.is_bridge ()) st->print("bridge ");
  if (flags.is_varargs ()) st->print("varargs ");
  if (flags.is_native ()) st->print("native ");
  if (flags.is_abstract ()) st->print("abstract ");
  if (flags.is_strictfp ()) st->print("strict ");
  if (flags.is_synthetic ()) st->print("synthetic ");
}
2313
// One-line summary: "{method} <addr> name signature in Holder", plus vtable
// index, parameter/local sizes and nmethod pointer in WizardMode.
void Method::print_value_on(outputStream* st) const {
  assert(is_method(), "must be method");
  st->print("%s", internal_name());
  print_address_on(st);
  st->print(" ");
  name()->print_value_on(st);
  st->print(" ");
  signature()->print_value_on(st);
  st->print(" in ");
  method_holder()->print_value_on(st);
  if (WizardMode) st->print("#%d", _vtable_index);
  if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
  if (WizardMode && code() != nullptr) st->print(" ((nmethod*)%p)", code());
}
2328
2329 // Verification
2330
2331 void Method::verify_on(outputStream* st) {
2332 guarantee(is_method(), "object must be method");
2333 guarantee(constants()->is_constantPool(), "should be constant pool");
2334 MethodData* md = method_data();
2335 guarantee(md == nullptr ||
2336 md->is_methodData(), "should be method data");
2337 }
|
36 #include "code/debugInfoRec.hpp"
37 #include "compiler/compilationPolicy.hpp"
38 #include "gc/shared/collectedHeap.inline.hpp"
39 #include "interpreter/bytecodes.hpp"
40 #include "interpreter/bytecodeStream.hpp"
41 #include "interpreter/bytecodeTracer.hpp"
42 #include "interpreter/interpreter.hpp"
43 #include "interpreter/oopMapCache.hpp"
44 #include "logging/log.hpp"
45 #include "logging/logStream.hpp"
46 #include "logging/logTag.hpp"
47 #include "memory/allocation.inline.hpp"
48 #include "memory/metadataFactory.hpp"
49 #include "memory/metaspaceClosure.hpp"
50 #include "memory/oopFactory.hpp"
51 #include "memory/resourceArea.hpp"
52 #include "memory/universe.hpp"
53 #include "nmt/memTracker.hpp"
54 #include "oops/constantPool.hpp"
55 #include "oops/constMethod.hpp"
56 #include "oops/inlineKlass.inline.hpp"
57 #include "oops/jmethodIDTable.hpp"
58 #include "oops/klass.inline.hpp"
59 #include "oops/method.inline.hpp"
60 #include "oops/methodData.hpp"
61 #include "oops/objArrayKlass.hpp"
62 #include "oops/objArrayOop.inline.hpp"
63 #include "oops/oop.inline.hpp"
64 #include "oops/symbol.hpp"
65 #include "oops/trainingData.hpp"
66 #include "prims/jvmtiExport.hpp"
67 #include "prims/methodHandles.hpp"
68 #include "runtime/arguments.hpp"
69 #include "runtime/atomicAccess.hpp"
70 #include "runtime/continuationEntry.hpp"
71 #include "runtime/frame.inline.hpp"
72 #include "runtime/handles.inline.hpp"
73 #include "runtime/init.hpp"
74 #include "runtime/java.hpp"
75 #include "runtime/orderAccess.hpp"
76 #include "runtime/perfData.hpp"
151 method_data()->~MethodData();
152 }
153 }
154
// i2c adapter entry; abstract methods have no adapter and get the
// AbstractMethodError-throwing stub instead.
address Method::get_i2c_entry() {
  if (is_abstract()) {
    return SharedRuntime::throw_AbstractMethodError_entry();
  }
  assert(adapter() != nullptr, "must have");
  return adapter()->get_i2c_entry();
}
162
// c2i adapter entry; abstract methods get the wrong-method-abstract stub.
address Method::get_c2i_entry() {
  if (is_abstract()) {
    return SharedRuntime::get_handle_wrong_method_abstract_stub();
  }
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_entry();
}
170
// c2i adapter entry for the inline-type (scalarized) calling convention;
// abstract methods get the wrong-method-abstract stub.
address Method::get_c2i_inline_entry() {
  if (is_abstract()) {
    return SharedRuntime::get_handle_wrong_method_abstract_stub();
  }
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_inline_entry();
}
178
// c2i adapter entry for the inline-type convention with receiver-only
// scalarization ("ro" variant -- presumably receiver/read-only; confirm
// against AdapterHandlerEntry). Abstract methods get the abstract stub.
address Method::get_c2i_inline_ro_entry() {
  if (is_abstract()) {
    return SharedRuntime::get_handle_wrong_method_abstract_stub();
  }
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_inline_ro_entry();
}
186
// Unverified c2i adapter entry; abstract methods get the abstract stub.
address Method::get_c2i_unverified_entry() {
  if (is_abstract()) {
    return SharedRuntime::get_handle_wrong_method_abstract_stub();
  }
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_unverified_entry();
}
194
195 address Method::get_c2i_unverified_inline_entry() {
196 assert(adapter() != nullptr, "must have");
197 return adapter()->get_c2i_unverified_inline_entry();
198 }
199
// c2i adapter entry that skips the class-init check; nullptr for abstract
// methods, and only valid on platforms with fast class-init checks.
address Method::get_c2i_no_clinit_check_entry() {
  if (is_abstract()) {
    return nullptr;
  }
  assert(VM_Version::supports_fast_class_init_checks(), "");
  assert(adapter() != nullptr, "must have");
  return adapter()->get_c2i_no_clinit_check_entry();
}
208
209 char* Method::name_and_sig_as_C_string(bool use_double_colon) const {
210 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), use_double_colon);
211 }
212
213 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
214 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
215 }
216
217 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, bool use_double_colon) {
218 const char* klass_name = klass->external_name();
219 int klass_name_len = (int)strlen(klass_name);
414 return code_base();
415 } else {
416 return bcp;
417 }
418 }
419
420 int Method::size(bool is_native) {
421 // If native, then include pointers for native_function and signature_handler
422 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
423 int extra_words = align_up(extra_bytes, BytesPerWord) / BytesPerWord;
424 return align_metadata_size(header_size() + extra_words);
425 }
426
// Name Symbol of the klass that holds this method.
Symbol* Method::klass_name() const {
  return method_holder()->name();
}
430
// Visit the metaspace pointers embedded in this Method (for AOT/CDS archiving).
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(aot)("Iter(Method): %p", this);

  if (!method_holder()->is_rewritten() || Arguments::is_valhalla_enabled()) {
    // ConstMethod may still be modified after archiving in these cases,
    // so push it as writable.
    it->push(&_constMethod, MetaspaceClosure::_writable);
  } else {
    it->push(&_constMethod);
  }
  it->push(&_adapter);
  it->push(&_method_data);
  it->push(&_method_counters);
  // _name exists only in non-product builds.
  NOT_PRODUCT(it->push(&_name);)
}
444
445 #if INCLUDE_CDS
446 // Attempt to return method to original state. Clear any pointers
447 // (to objects outside the shared spaces). We won't be able to predict
448 // where they should point in a new JVM. Further initialize some
449 // entries now in order allow them to be write protected later.
450
451 void Method::remove_unshareable_info() {
452 unlink_method();
453 if (method_data() != nullptr) {
454 method_data()->remove_unshareable_info();
457 method_counters()->remove_unshareable_info();
458 }
459 if (CDSConfig::is_dumping_adapters() && _adapter != nullptr) {
460 _adapter->remove_unshareable_info();
461 _adapter = nullptr;
462 }
463 JFR_ONLY(REMOVE_METHOD_ID(this);)
464 }
465
// Re-establish runtime state that remove_unshareable_info() cleared before
// this method was written to the archive.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  if (_adapter != nullptr) {
    // A pre-linked archived adapter supplies all compiled entry points.
    assert(_adapter->is_linked(), "must be");
    _from_compiled_entry = _adapter->get_c2i_entry();
    _from_compiled_inline_entry = _adapter->get_c2i_inline_entry();
    _from_compiled_inline_ro_entry = _adapter->get_c2i_inline_ro_entry();
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
}
482 #endif
483
484 void Method::set_vtable_index(int index) {
485 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
486 // At runtime initialize_vtable is rerun as part of link_class_impl()
487 // for a shared class loaded by the non-boot loader to obtain the loader
488 // constraints based on the runtime classloaders' context.
489 return; // don't write into the shared class
490 } else {
491 _vtable_index = index;
492 }
493 }
494
495 void Method::set_itable_index(int index) {
496 if (in_aot_cache() && !AOTMetaspace::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
497 // At runtime initialize_itable is rerun as part of link_class_impl()
498 // for a shared class loaded by the non-boot loader to obtain the loader
bool Method::init_method_counters(MethodCounters* counters) {
  // Try to install a pointer to MethodCounters, return true on success.
  // CAS-based: only succeeds if no counters have been installed yet.
  return AtomicAccess::replace_if_null(&_method_counters, counters);
}
752
753 void Method::set_exception_handler_entered(int handler_bci) {
754 if (ProfileExceptionHandlers) {
755 MethodData* mdo = method_data();
756 if (mdo != nullptr) {
757 BitData handler_data = mdo->exception_handler_bci_to_data(handler_bci);
758 handler_data.set_exception_handler_entered();
759 }
760 }
761 }
762
// Extra expression-stack space in words (extra_stack_entries() scaled by the
// interpreter's stack element size).
int Method::extra_stack_words() {
  // not an inline function, to avoid a header dependency on Interpreter
  return extra_stack_entries() * Interpreter::stackElementSize;
}
767
// InlineKlass the method is declared to return. This must not
// safepoint as it is called with references live on the stack at
// locations the GC is unaware of.
InlineKlass* Method::returns_inline_type() const {
  assert(InlineTypeReturnedAsFields, "Inline types should never be returned as fields");
  if (is_native()) {
    // Native methods never return an inline type in scalarized form.
    return nullptr;
  }
  // Verify no safepoint occurs while the signature is walked.
  NoSafepointVerifier nsv;
  SignatureStream ss(signature());
  ss.skip_to_return_type();
  // Presumably nullptr when the return type is not an inline klass -- see
  // SignatureStream::as_inline_klass for the exact contract.
  return ss.as_inline_klass(method_holder());
}
781
782 bool Method::compute_has_loops_flag() {
783 BytecodeStream bcs(methodHandle(Thread::current(), this));
784 Bytecodes::Code bc;
785
786 while ((bc = bcs.next()) >= 0) {
787 switch (bc) {
788 case Bytecodes::_ifeq:
789 case Bytecodes::_ifnull:
790 case Bytecodes::_iflt:
791 case Bytecodes::_ifle:
792 case Bytecodes::_ifne:
793 case Bytecodes::_ifnonnull:
794 case Bytecodes::_ifgt:
795 case Bytecodes::_ifge:
796 case Bytecodes::_if_icmpeq:
797 case Bytecodes::_if_icmpne:
798 case Bytecodes::_if_icmplt:
799 case Bytecodes::_if_icmpgt:
800 case Bytecodes::_if_icmple:
801 case Bytecodes::_if_icmpge:
910
911 bool Method::is_accessor() const {
912 return is_getter() || is_setter();
913 }
914
915 bool Method::is_getter() const {
916 if (code_size() != 5) return false;
917 if (size_of_parameters() != 1) return false;
918 if (java_code_at(0) != Bytecodes::_aload_0) return false;
919 if (java_code_at(1) != Bytecodes::_getfield) return false;
920 switch (java_code_at(4)) {
921 case Bytecodes::_ireturn:
922 case Bytecodes::_lreturn:
923 case Bytecodes::_freturn:
924 case Bytecodes::_dreturn:
925 case Bytecodes::_areturn:
926 break;
927 default:
928 return false;
929 }
930 if (InlineTypeReturnedAsFields && returns_inline_type() != nullptr) {
931 // Don't treat this as (trivial) getter method because the
932 // inline type could be returned in a scalarized form.
933 return false;
934 }
935 return true;
936 }
937
938 bool Method::is_setter() const {
939 if (code_size() != 6) return false;
940 if (java_code_at(0) != Bytecodes::_aload_0) return false;
941 switch (java_code_at(1)) {
942 case Bytecodes::_iload_1:
943 case Bytecodes::_aload_1:
944 case Bytecodes::_fload_1:
945 if (size_of_parameters() != 2) return false;
946 break;
947 case Bytecodes::_dload_1:
948 case Bytecodes::_lload_1:
949 if (size_of_parameters() != 3) return false;
950 break;
951 default:
952 return false;
953 }
954 if (java_code_at(2) != Bytecodes::_putfield) return false;
955 if (java_code_at(5) != Bytecodes::_return) return false;
956 if (has_scalarized_args()) {
957 // Don't treat this as (trivial) setter method because the
958 // inline type argument should be passed in a scalarized form.
959 return false;
960 }
961 return true;
962 }
963
964 bool Method::is_constant_getter() const {
965 int last_index = code_size() - 1;
966 // Check if the first 1-3 bytecodes are a constant push
967 // and the last bytecode is a return.
968 return (2 <= code_size() && code_size() <= 4 &&
969 Bytecodes::is_const(java_code_at(0)) &&
970 Bytecodes::length_for(java_code_at(0)) == last_index &&
971 Bytecodes::is_return(java_code_at(last_index)) &&
972 !has_scalarized_args());
973 }
974
975 bool Method::is_class_initializer() const {
976 // For classfiles version 51 or greater, ensure that the clinit method is
977 // static. Non-static methods with the name "<clinit>" are not static
978 // initializers. (older classfiles exempted for backward compatibility)
979 return (name() == vmSymbols::class_initializer_name() &&
980 (is_static() ||
981 method_holder()->major_version() < 51));
982 }
983
// A method named <init>, is a classic object constructor.
// Only the name symbol is checked here; holder and signature are not.
bool Method::is_object_constructor() const {
  return name() == vmSymbols::object_initializer_name();
}
988
// A static method needs a class-initialization barrier as long as its holder
// has not finished initialization.
bool Method::needs_clinit_barrier() const {
  return is_static() && !method_holder()->is_initialized();
}
992
993 bool Method::is_object_wait0() const {
994 return klass_name() == vmSymbols::java_lang_Object()
995 && name() == vmSymbols::wait_name();
996 }
997
998 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
999 int length = method->checked_exceptions_length();
1000 if (length == 0) { // common case
1001 return objArrayHandle(THREAD, Universe::the_empty_class_array());
1002 } else {
1003 methodHandle h_this(THREAD, method);
1004 objArrayOop m_oop = oopFactory::new_objArray(vmClasses::Class_klass(), length, CHECK_(objArrayHandle()));
1005 objArrayHandle mirrors (THREAD, m_oop);
1006 for (int i = 0; i < length; i++) {
1029 // Not necessarily sorted and not necessarily one-to-one.
1030 CompressedLineNumberReadStream stream(compressed_linenumber_table());
1031 while (stream.read_pair()) {
1032 if (stream.bci() == bci) {
1033 // perfect match
1034 return stream.line();
1035 } else {
1036 // update best_bci/line
1037 if (stream.bci() < bci && stream.bci() >= best_bci) {
1038 best_bci = stream.bci();
1039 best_line = stream.line();
1040 }
1041 }
1042 }
1043 }
1044 return best_line;
1045 }
1046
1047
1048 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
1049 if( constants()->tag_at(klass_index).is_unresolved_klass()) {
1050 Thread *thread = Thread::current();
1051 Symbol* klass_name = constants()->klass_name_at(klass_index);
1052 Handle loader(thread, method_holder()->class_loader());
1053 return SystemDictionary::find_instance_klass(thread, klass_name, loader) != nullptr;
1054 } else {
1055 return true;
1056 }
1057 }
1058
1059
1060 bool Method::is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved) const {
1061 int klass_index = constants()->klass_ref_index_at(refinfo_index, bc);
1062 if (must_be_resolved) {
1063 // Make sure klass is resolved in constantpool.
1064 if (constants()->tag_at(klass_index).is_unresolved_klass()) {
1065 return false;
1066 }
1067 }
1068 return is_klass_loaded_by_klass_index(klass_index);
1069 }
1070
1071
1072 void Method::set_native_function(address function, bool post_event_flag) {
1073 assert(function != nullptr, "use clear_native_function to unregister natives");
1074 assert(!is_special_native_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
1075 address* native_function = native_function_addr();
1076
1077 // We can see racers trying to place the same native function into place. Once
1078 // is plenty.
1079 address current = *native_function;
1080 if (current == function) return;
1081 if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
1082 function != nullptr) {
1083 // native_method_throw_unsatisfied_link_error_entry() should only
1084 // be passed when post_event_flag is false.
1085 assert(function !=
1086 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1214
1215 void Method::set_not_osr_compilable(const char* reason, int comp_level, bool report) {
1216 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
1217 if (comp_level == CompLevel_all) {
1218 set_is_not_c1_osr_compilable();
1219 set_is_not_c2_osr_compilable();
1220 } else {
1221 if (is_c1_compile(comp_level))
1222 set_is_not_c1_osr_compilable();
1223 if (is_c2_compile(comp_level))
1224 set_is_not_c2_osr_compilable();
1225 }
1226 assert(!CompilationPolicy::can_be_osr_compiled(methodHandle(Thread::current(), this), comp_level), "sanity check");
1227 }
1228
// Revert to using the interpreter and clear out the nmethod
void Method::clear_code() {
  // this may be null if c2i adapters have not been made yet
  // Only should happen at allocate time.
  if (adapter() == nullptr) {
    _from_compiled_entry = nullptr;
    _from_compiled_inline_entry = nullptr;
    _from_compiled_inline_ro_entry = nullptr;
  } else {
    _from_compiled_entry = adapter()->get_c2i_entry();
    _from_compiled_inline_entry = adapter()->get_c2i_inline_entry();
    _from_compiled_inline_ro_entry = adapter()->get_c2i_inline_ro_entry();
  }
  // Publish the compiled-entry updates before redirecting interpreted calls...
  OrderAccess::storestore();
  _from_interpreted_entry = _i2i_entry;
  // ...and publish that before dropping the nmethod reference.
  OrderAccess::storestore();
  _code = nullptr;
}
1247
// Revert to the interpreter if 'compare' is (or still appears to be) the
// installed nmethod for this method.
void Method::unlink_code(nmethod *compare) {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  // We need to check if either the _code or _from_compiled_code_entry_point
  // refer to this nmethod because there is a race in setting these two fields
  // in Method* as seen in bugid 4947125.
  if (code() == compare ||
      from_compiled_entry() == compare->verified_entry_point()) {
    clear_code();
  }
}
1258
// Unconditionally revert this method to the interpreter, under NMethodState_lock.
void Method::unlink_code() {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  clear_code();
}
1263
1264 #if INCLUDE_CDS
1265 // Called by class data sharing to remove any entry points (which are not shared)
void Method::unlink_method() {
  assert(CDSConfig::is_dumping_archive(), "sanity");
  // Compiled code and entry points are process-local and must not be archived.
  _code = nullptr;
  if (!CDSConfig::is_dumping_adapters()) {
    _adapter = nullptr;
  }
  _i2i_entry = nullptr;
  _from_compiled_entry = nullptr;
  _from_compiled_inline_entry = nullptr;
  _from_compiled_inline_ro_entry = nullptr;
  _from_interpreted_entry = nullptr;

  if (is_native()) {
    // Native function pointer and signature handler are bound at runtime.
    *native_function_addr() = nullptr;
    set_signature_handler(nullptr);
  }
  NOT_PRODUCT(set_compiled_invocation_count(0);)

  // Profiling data, counters and compilability state are runtime-only too.
  clear_method_data();
  clear_method_counters();
  clear_is_not_c1_compilable();
  clear_is_not_c1_osr_compilable();
  clear_is_not_c2_compilable();
  clear_is_not_c2_osr_compilable();
  clear_queued_for_compilation();

  remove_unshareable_flags();
}
1294
void Method::remove_unshareable_flags() {
  // clear all the flags that shouldn't be in the archived version
  assert(!is_old(), "must be");
  assert(!is_obsolete(), "must be");
  assert(!is_deleted(), "must be");

  set_is_prefixed_native(false);
  set_queued_for_compilation(false);
  set_is_not_c2_compilable(false);
  set_is_not_c1_compilable(false);
  set_is_not_c2_osr_compilable(false);
  // NOTE(review): is_not_c1_osr_compilable is not cleared here, although
  // unlink_method() clears it via clear_is_not_c1_osr_compilable() --
  // confirm whether the omission is intentional.
  set_on_stack_flag(false);
  set_has_scalarized_args(false);
}
1309 #endif
1310
1311 // Called when the method_holder is getting linked. Setup entrypoints so the method
1312 // is ready to be called from interpreter, compiler, and vtables.
1313 void Method::link_method(const methodHandle& h_method, TRAPS) {
1314 if (log_is_enabled(Info, perf, class, link)) {
1315 ClassLoader::perf_ik_link_methods_count()->inc();
1316 }
1317
1318 // If the code cache is full, we may reenter this function for the
1319 // leftover methods that weren't linked.
1320 if (adapter() != nullptr) {
1321 if (adapter()->in_aot_cache()) {
1322 assert(adapter()->is_linked(), "Adapter is shared but not linked");
1323 } else {
1324 return;
1325 }
1326 }
1327 assert( _code == nullptr, "nothing compiled yet" );
1333 address entry = Interpreter::entry_for_method(h_method);
1334 assert(entry != nullptr, "interpreter entry must be non-null");
1335 // Sets both _i2i_entry and _from_interpreted_entry
1336 set_interpreter_entry(entry);
1337
1338 // Don't overwrite already registered native entries.
1339 if (is_native() && !has_native_function()) {
1340 set_native_function(
1341 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1342 !native_bind_event_is_interesting);
1343 }
1344
1345 // Setup compiler entrypoint. This is made eagerly, so we do not need
1346 // special handling of vtables. An alternative is to make adapters more
1347 // lazily by calling make_adapter() from from_compiled_entry() for the
1348 // normal calls. For vtable calls life gets more complicated. When a
1349 // call-site goes mega-morphic we need adapters in all methods which can be
1350 // called from the vtable. We need adapters on such methods that get loaded
1351 // later. Ditto for mega-morphic itable calls. If this proves to be a
1352 // problem we'll make these lazily later.
1353 // With the scalarized calling convention, create adapters for abstract
1354 // methods as well because the adapter is used to propagate the signature.
1355 if (_adapter == nullptr && (!h_method->is_abstract() || InlineTypePassFieldsAsArgs)) {
1356 make_adapters(h_method, CHECK);
1357 }
1358 h_method->_from_compiled_entry = h_method->get_c2i_entry();
1359 h_method->_from_compiled_inline_entry = h_method->get_c2i_inline_entry();
1360 h_method->_from_compiled_inline_ro_entry = h_method->get_c2i_inline_ro_entry();
1361
1362 // ONLY USE the h_method now as make_adapter may have blocked
1363
1364 if (h_method->is_continuation_native_intrinsic()) {
1365 _from_interpreted_entry = nullptr;
1366 _from_compiled_entry = nullptr;
1367 _i2i_entry = nullptr;
1368 if (Continuations::enabled()) {
1369 assert(!Threads::is_vm_complete(), "should only be called during vm init");
1370 AdapterHandlerLibrary::create_native_wrapper(h_method);
1371 if (!h_method->has_compiled_code()) {
1372 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
1373 }
1374 assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
1375 }
1376 }
1377 }
1378
// Obtain (or create) the i2c/c2i adapter for 'mh' and install it.
// Exits the VM during initialization, or throws OutOfMemoryError afterwards,
// if the code cache has no room for adapters.
void Method::make_adapters(const methodHandle& mh, TRAPS) {
  assert(!mh->is_abstract() || InlineTypePassFieldsAsArgs, "abstract methods do not have adapters");
  PerfTraceTime timer(ClassLoader::perf_method_adapters_time());

  // Adapters for compiled code are made eagerly here. They are fairly
  // small (generally < 100 bytes) and quick to make (and cached and shared)
  // so making them eagerly shouldn't be too expensive.
  AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
  if (adapter == nullptr ) {
    if (!is_init_completed()) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    } else {
      THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
    }
  }

  // With scalarized arguments the adapter must already carry the scalarized
  // calling-convention signatures.
  assert(!mh->has_scalarized_args() || (adapter->get_sig_cc() != nullptr && adapter->get_sig_cc_ro() != nullptr), "should be initialized");

  mh->set_adapter_entry(adapter);
#ifndef ZERO
  assert(adapter->is_linked(), "Adapter must have been linked");
#endif
}
1405
// The verified_code_entry() must be called when a invoke is resolved
// on this method.

// It returns the compiled code entry point, after asserting not null.
// This function is called after potential safepoints so that nmethod
// or adapter that it points to is still live and valid.
// This function must not hit a safepoint!
address Method::verified_code_entry() {
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_entry != nullptr, "must be set");
  return _from_compiled_entry;
}
1418
// As verified_code_entry(), but for the inline (scalarized) calling convention.
// Must not hit a safepoint.
address Method::verified_inline_code_entry() {
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_inline_entry != nullptr, "must be set");
  return _from_compiled_inline_entry;
}
1424
// As verified_code_entry(), but for the inline receiver-only convention.
// Must not hit a safepoint.
address Method::verified_inline_ro_code_entry() {
  DEBUG_ONLY(NoSafepointVerifier nsv;)
  assert(_from_compiled_inline_ro_entry != nullptr, "must be set");
  return _from_compiled_inline_ro_entry;
}
1430
// Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
// (could be racing a deopt).
// Not inline to avoid circular ref.
bool Method::check_code() const {
  // cached in a register or local. There's a race on the value of the field.
  nmethod *code = AtomicAccess::load_acquire(&_code);
  // Valid states: no code, code not yet linked to a method, or code linked to
  // this method and not an OSR compilation.
  return code == nullptr || (code->method() == nullptr) || (code->method() == (Method*)this && !code->is_osr_method());
}
1439
// Install compiled code. Instantly it can execute.
void Method::set_code(const methodHandle& mh, nmethod *code) {
  assert_lock_strong(NMethodState_lock);
  assert( code, "use clear_code to remove code" );
  assert( mh->check_code(), "" );

  guarantee(mh->adapter() != nullptr, "Adapter blob must already exist!");

  // These writes must happen in this order, because the interpreter will
  // directly jump to from_interpreted_entry which jumps to an i2c adapter
  // which jumps to _from_compiled_entry.
  mh->_code = code;             // Assign before allowing compiled code to exec

  int comp_level = code->comp_level();
  // In theory there could be a race here. In practice it is unlikely
  // and not worth worrying about.
  if (comp_level > mh->highest_comp_level()) {
    mh->set_highest_comp_level(comp_level);
  }

  // Fence so _code is visible before any compiled entry points are published.
  OrderAccess::storestore();
  mh->_from_compiled_entry = code->verified_entry_point();
  mh->_from_compiled_inline_entry = code->verified_inline_entry_point();
  mh->_from_compiled_inline_ro_entry = code->verified_inline_ro_entry_point();
  // Fence the compiled entries before redirecting the interpreted entry below.
  OrderAccess::storestore();

  if (mh->is_continuation_native_intrinsic()) {
    assert(mh->_from_interpreted_entry == nullptr, "initialized incorrectly"); // see link_method

    if (mh->is_continuation_enter_intrinsic()) {
      // This is the entry used when we're in interpreter-only mode; see InterpreterMacroAssembler::jump_from_interpreted
      mh->_i2i_entry = ContinuationEntry::interpreted_entry();
    } else if (mh->is_continuation_yield_intrinsic()) {
      mh->_i2i_entry = mh->get_i2c_entry();
    } else {
      guarantee(false, "Unknown Continuation native intrinsic");
    }
    // This must come last, as it is what's tested in LinkResolver::resolve_static_call
    AtomicAccess::release_store(&mh->_from_interpreted_entry , mh->get_i2c_entry());
  } else if (!mh->is_method_handle_intrinsic()) {
    // Instantly compiled code can execute.
    mh->_from_interpreted_entry = mh->get_i2c_entry();
  }
}
1636 assert(m->can_be_statically_bound(), "");
1637 m->set_vtable_index(Method::nonvirtual_vtable_index);
1638 m->link_method(m, CHECK_(empty));
1639
1640 if (iid == vmIntrinsics::_linkToNative) {
1641 m->set_interpreter_entry(m->adapter()->get_i2c_entry());
1642 }
1643 if (log_is_enabled(Debug, methodhandles)) {
1644 LogTarget(Debug, methodhandles) lt;
1645 LogStream ls(lt);
1646 m->print_on(&ls);
1647 }
1648
1649 return m;
1650 }
1651
1652 #if INCLUDE_CDS
// Re-link an archived method-handle intrinsic Method after it is loaded from
// the AOT/CDS archive, restoring its compiled entry points from the adapter.
void Method::restore_archived_method_handle_intrinsic(methodHandle m, TRAPS) {
  if (m->adapter() != nullptr) {
    m->set_from_compiled_entry(m->adapter()->get_c2i_entry());
    m->set_from_compiled_inline_entry(m->adapter()->get_c2i_inline_entry());
    m->set_from_compiled_inline_ro_entry(m->adapter()->get_c2i_inline_ro_entry());
  }
  m->link_method(m, CHECK);

  if (m->intrinsic_id() == vmIntrinsics::_linkToNative) {
    // linkToNative uses the i2c entry as its interpreter entry (see
    // make_method_handle_intrinsic, which does the same).
    m->set_interpreter_entry(m->adapter()->get_i2c_entry());
  }
}
1665 #endif
1666
1667 Klass* Method::check_non_bcp_klass(Klass* klass) {
1668 if (klass != nullptr && klass->class_loader() != nullptr) {
1669 if (klass->is_objArray_klass())
1670 klass = ObjArrayKlass::cast(klass)->bottom_klass();
1671 return klass;
1672 }
1673 return nullptr;
1674 }
1675
1676
1677 methodHandle Method::clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
2255 }
2256
// Check that this pointer is valid by checking that the vtbl pointer matches
bool Method::is_valid_method(const Method* m) {
  if (m == nullptr) {
    return false;
  } else if ((intptr_t(m) & (wordSize-1)) != 0) {
    // Quick sanity check on pointer.
    return false;
  } else if (!os::is_readable_range(m, m + 1)) {
    // Reject pointers into unmapped memory before dereferencing anything.
    return false;
  } else if (m->in_aot_cache()) {
    // Archived Methods are validated against the archived C++ vtables.
    return CppVtables::is_valid_shared_method(m);
  } else if (Metaspace::contains_non_shared(m)) {
    return has_method_vptr((const void*)m);
  } else {
    // Not in metaspace at all -- cannot be a Method.
    return false;
  }
}
2274
// Returns true if argument 'idx' (counting from 0 over the logical arguments)
// is passed in scalarized form, i.e. it sits inside a T_METADATA/T_VOID
// wrapper in the scalarized compiled-call signature.
bool Method::is_scalarized_arg(int idx) const {
  if (!has_scalarized_args()) {
    return false;
  }
  // Search through signature and check if argument is wrapped in T_METADATA/T_VOID
  int depth = 0;   // current nesting level of scalarized inline-type wrappers
  const GrowableArray<SigEntry>* sig = adapter()->get_sig_cc();
  for (int i = 0; i < sig->length(); i++) {
    BasicType bt = sig->at(i)._bt;
    if (bt == T_METADATA) {
      // Start of a (possibly nested) scalarized inline-type argument.
      depth++;
    }
    if (idx == 0) {
      break; // Argument found
    }
    if (bt == T_VOID && (sig->at(i-1)._bt != T_LONG && sig->at(i-1)._bt != T_DOUBLE)) {
      // A T_VOID not acting as a long/double upper half closes a wrapper.
      depth--;
    }
    if (depth == 0 && bt != T_LONG && bt != T_DOUBLE) {
      idx--; // Advance to next argument
    }
  }
  // Inside a wrapper (depth != 0) means the argument is scalarized.
  return depth != 0;
}
2299
// Returns true if argument 'idx' is the buffered-oop slot of a scalarized
// inline-type argument (entry flagged with _vt_oop at wrapper depth 1).
bool Method::is_scalarized_buffer_arg(int idx) const {
  if (!has_scalarized_args()) {
    return false;
  }
  // Search through signature and check if argument is wrapped in T_METADATA/T_VOID
  int depth = 0;   // current nesting level of scalarized inline-type wrappers
  const GrowableArray<SigEntry>* sig = adapter()->get_sig_cc();
  for (int i = 0; i < sig->length(); i++) {
    BasicType bt = sig->at(i)._bt;
    if (bt == T_METADATA) {
      depth++;
      continue;
    }
    if (bt == T_VOID && (sig->at(i-1)._bt != T_LONG && sig->at(i-1)._bt != T_DOUBLE)) {
      // A T_VOID not acting as a long/double upper half closes a wrapper.
      depth--;
      continue;
    }
    if (idx == 0) {
      // _vt_oop presumably marks the buffered oop of the root value --
      // see SigEntry for the exact meaning.
      if (sig->at(i)._vt_oop) {
        assert(depth == 1, "only for root value");
        return true;
      }
      break; // Argument found
    }
    idx--; // Advance to next argument
  }
  return false;
}
2328
2329 // Printing
2330
2331 #ifndef PRODUCT
2332
2333 void Method::print_on(outputStream* st) const {
2334 ResourceMark rm;
2335 assert(is_method(), "must be method");
2336 st->print_cr("%s", internal_name());
2337 st->print_cr(" - this oop: " PTR_FORMAT, p2i(this));
2338 st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr();
2339 st->print (" - constants: " PTR_FORMAT " ", p2i(constants()));
2340 constants()->print_value_on(st); st->cr();
2341 st->print (" - access: 0x%x ", access_flags().as_method_flags()); print_access_flags(st); st->cr();
2342 st->print (" - flags: 0x%x ", _flags.as_int()); _flags.print_on(st); st->cr();
2343 st->print (" - name: "); name()->print_value_on(st); st->cr();
2344 st->print (" - signature: "); signature()->print_value_on(st); st->cr();
2345 st->print_cr(" - max stack: %d", max_stack());
2346 st->print_cr(" - max locals: %d", max_locals());
2347 st->print_cr(" - size of params: %d", size_of_parameters());
2348 st->print_cr(" - method size: %d", method_size());
2349 if (intrinsic_id() != vmIntrinsics::_none)
2350 st->print_cr(" - intrinsic id: %d %s", vmIntrinsics::as_int(intrinsic_id()), vmIntrinsics::name_at(intrinsic_id()));
2351 if (highest_comp_level() != CompLevel_none)
2352 st->print_cr(" - highest level: %d", highest_comp_level());
2353 st->print_cr(" - vtable index: %d", _vtable_index);
2354 #ifdef ASSERT
2355 if (valid_itable_index())
2356 st->print_cr(" - itable index: %d", itable_index());
2357 #endif
2358 st->print_cr(" - i2i entry: " PTR_FORMAT, p2i(interpreter_entry()));
2359 st->print( " - adapters: ");
2360 AdapterHandlerEntry* a = ((Method*)this)->adapter();
2361 if (a == nullptr)
2362 st->print_cr(PTR_FORMAT, p2i(a));
2363 else
2364 a->print_adapter_on(st);
2365 st->print_cr(" - compiled entry " PTR_FORMAT, p2i(from_compiled_entry()));
2366 st->print_cr(" - compiled inline entry " PTR_FORMAT, p2i(from_compiled_inline_entry()));
2367 st->print_cr(" - compiled inline ro entry " PTR_FORMAT, p2i(from_compiled_inline_ro_entry()));
2368 st->print_cr(" - code size: %d", code_size());
2369 if (code_size() != 0) {
2370 st->print_cr(" - code start: " PTR_FORMAT, p2i(code_base()));
2371 st->print_cr(" - code end (excl): " PTR_FORMAT, p2i(code_base() + code_size()));
2372 }
2373 if (method_data() != nullptr) {
2374 st->print_cr(" - method data: " PTR_FORMAT, p2i(method_data()));
2375 }
2376 st->print_cr(" - checked ex length: %d", checked_exceptions_length());
2377 if (checked_exceptions_length() > 0) {
2378 CheckedExceptionElement* table = checked_exceptions_start();
2379 st->print_cr(" - checked ex start: " PTR_FORMAT, p2i(table));
2380 if (Verbose) {
2381 for (int i = 0; i < checked_exceptions_length(); i++) {
2382 st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
2383 }
2384 }
2385 }
2386 if (has_linenumber_table()) {
2387 u_char* table = compressed_linenumber_table();
2426 if (is_overpass()) {
2427 st->print("overpass ");
2428 }
2429 }
2430 #endif //PRODUCT
2431
// Print Java-source-style modifier keywords for this method's access flags.
void Method::print_access_flags(outputStream* st) const {
  AccessFlags flags = access_flags();
  if (flags.is_public ()) st->print("public ");
  if (flags.is_private ()) st->print("private ");
  if (flags.is_protected ()) st->print("protected ");
  if (flags.is_static ()) st->print("static ");
  if (flags.is_final ()) st->print("final ");
  if (flags.is_synchronized()) st->print("synchronized ");
  if (flags.is_bridge ()) st->print("bridge ");
  if (flags.is_varargs ()) st->print("varargs ");
  if (flags.is_native ()) st->print("native ");
  if (flags.is_abstract ()) st->print("abstract ");
  if (flags.is_strictfp ()) st->print("strict ");
  if (flags.is_synthetic ()) st->print("synthetic ");
  if (Arguments::is_valhalla_enabled()) {
    // Under Valhalla every class is either an identity or a value class.
    if (flags.is_identity_class()) st->print("identity ");
    if (!flags.is_identity_class()) st->print("value " );
  }
}
2451
// One-line summary of this method (name, signature, holder); WizardMode adds
// flags, vtable index, sizes and the installed nmethod.
void Method::print_value_on(outputStream* st) const {
  assert(is_method(), "must be method");
  st->print("%s", internal_name());
  print_address_on(st);
  st->print(" ");
  if (WizardMode) print_access_flags(st);
  name()->print_value_on(st);
  st->print(" ");
  signature()->print_value_on(st);
  st->print(" in ");
  method_holder()->print_value_on(st);
  if (WizardMode) st->print("#%d", _vtable_index);
  if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
  if (WizardMode && code() != nullptr) st->print(" ((nmethod*)%p)", code());
}
2467
2468 // Verification
2469
2470 void Method::verify_on(outputStream* st) {
2471 guarantee(is_method(), "object must be method");
2472 guarantee(constants()->is_constantPool(), "should be constant pool");
2473 MethodData* md = method_data();
2474 guarantee(md == nullptr ||
2475 md->is_methodData(), "should be method data");
2476 }
|