
src/hotspot/share/oops/method.cpp (old version)


  86                          TRAPS) {
  87   assert(!access_flags.is_native() || byte_code_size == 0,
  88          "native methods should not contain byte codes");
  89   ConstMethod* cm = ConstMethod::allocate(loader_data,
  90                                           byte_code_size,
  91                                           sizes,
  92                                           method_type,
  93                                           CHECK_NULL);
  94   int size = Method::size(access_flags.is_native());
  95   return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags);
  96 }
  97 
  98 Method::Method(ConstMethod* xconst, AccessFlags access_flags) {
  99   NoSafepointVerifier no_safepoint;
 100   set_constMethod(xconst);
 101   set_access_flags(access_flags);
 102   set_intrinsic_id(vmIntrinsics::_none);
 103   set_force_inline(false);
 104   set_hidden(false);
 105   set_dont_inline(false);
 106   set_has_injected_profile(false);
 107   set_method_data(NULL);
 108   clear_method_counters();
 109   set_vtable_index(Method::garbage_vtable_index);
 110 
 111   // Fix and bury in Method*
 112   set_interpreter_entry(NULL); // sets _i2i_entry and _from_interpreted_entry
 113   set_adapter_entry(NULL);
 114   Method::clear_code(); // from_c/from_i get set to c2i/i2i
 115 
 116   if (access_flags.is_native()) {
 117     clear_native_function();
 118     set_signature_handler(NULL);
 119   }
 120 
 121   NOT_PRODUCT(set_compiled_invocation_count(0);)
 122 }
 123 
 124 // Release Method*.  The nmethod will be gone when we get here because
 125 // we've walked the code cache.

 954     Thread *thread = Thread::current();
 955     Symbol* klass_name = constants()->klass_name_at(klass_index);
 956     Handle loader(thread, method_holder()->class_loader());
 957     Handle prot  (thread, method_holder()->protection_domain());
 958     return SystemDictionary::find_instance_klass(klass_name, loader, prot) != NULL;
 959   } else {
 960     return true;
 961   }
 962 }
 963 
 964 
 965 bool Method::is_klass_loaded(int refinfo_index, bool must_be_resolved) const {
 966   int klass_index = constants()->klass_ref_index_at(refinfo_index);
 967   if (must_be_resolved) {
 968     // Make sure the klass is resolved in the constant pool.
 969     if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
 970   }
 971   return is_klass_loaded_by_klass_index(klass_index);
 972 }
 973 
 974 
 975 void Method::set_native_function(address function, bool post_event_flag) {
 976   assert(function != NULL, "use clear_native_function to unregister natives");
 977   assert(!is_method_handle_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
 978   address* native_function = native_function_addr();
 979 
 980   // We can see racers trying to install the same native function. Once
 981   // is plenty.
 982   address current = *native_function;
 983   if (current == function) return;
 984   if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
 985       function != NULL) {
 986     // native_method_throw_unsatisfied_link_error_entry() should only
 987     // be passed when post_event_flag is false.
 988     assert(function !=
 989       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
 990       "post_event_flag mismatch");
 991 
 992     // post the bind event, and possibly change the bind function
 993     JvmtiExport::post_native_method_bind(this, &function);
 994   }
 995   *native_function = function;
 996   // This function can be called more than once. We must make sure that we always
 997   // use the latest registered method, so check whether a stub has already been
 998   // generated. If so, we have to make it not_entrant.
 999   CompiledMethod* nm = code(); // Put it into local variable to guard against concurrent updates
1000   if (nm != NULL) {
1001     nm->make_not_entrant();
1002   }
1003 }
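// For context, a minimal sketch of how a native function typically reaches
// Method::set_native_function at runtime: a JNI_OnLoad hook registers a C++
// implementation via RegisterNatives, the VM stores the pointer through
// native_function_addr(), and any existing native wrapper is made not_entrant
// so the next call rebinds. Class and method names below are hypothetical.
//
//   #include <jni.h>
//
//   static jint JNICALL my_counter_value(JNIEnv* env, jobject self) {
//     return 42;
//   }
//
//   JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
//     JNIEnv* env;
//     if (vm->GetEnv((void**)&env, JNI_VERSION_1_8) != JNI_OK) return JNI_ERR;
//     jclass cls = env->FindClass("com/example/Counter");  // hypothetical class
//     if (cls == NULL) return JNI_ERR;
//     JNINativeMethod m = {(char*)"value", (char*)"()I", (void*)my_counter_value};
//     if (env->RegisterNatives(cls, &m, 1) != JNI_OK) return JNI_ERR;
//     return JNI_VERSION_1_8;
//   }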
1004 
1005 
1006 bool Method::has_native_function() const {
1007   if (is_method_handle_intrinsic())
1008     return false;  // special-cased in SharedRuntime::generate_native_wrapper
1009   address func = native_function();
1010   return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
1011 }
1012 
1013 
1014 void Method::clear_native_function() {
1015   // Note: is_method_handle_intrinsic() is allowed here.
1016   set_native_function(
1017     SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1018     !native_bind_event_is_interesting);
1019   this->unlink_code();
1020 }
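// Sketch of the resulting behavior (my summary, assuming the usual JNI
// UnregisterNatives contract): after clear_native_function, a call to this
// native method lands in native_method_throw_unsatisfied_link_error_entry
// and throws UnsatisfiedLinkError until a new function is registered.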
1021 
1022 
1023 void Method::set_signature_handler(address handler) {
1024   address* signature_handler = signature_handler_addr();
1025   *signature_handler = handler;
1026 }
1027 

1203   // Sets both _i2i_entry and _from_interpreted_entry
1204   set_interpreter_entry(entry);
1205 
1206   // Don't overwrite already registered native entries.
1207   if (is_native() && !has_native_function()) {
1208     set_native_function(
1209       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1210       !native_bind_event_is_interesting);
1211   }
1212 
1213   // Set up the compiler entrypoint.  This is done eagerly, so we do not need
1214   // special handling of vtables.  An alternative is to make adapters lazily,
1215   // by calling make_adapters() from from_compiled_entry() for normal calls.
1216   // For vtable calls life gets more complicated.  When a call-site goes
1217   // mega-morphic we need adapters in all methods which can be called from the
1218   // vtable, including such methods that get loaded later.  Ditto for
1219   // mega-morphic itable calls.  If this proves to be a problem we'll make
1220   // these lazy later.
1221   (void) make_adapters(h_method, CHECK);
1222 
1223   // ONLY USE the h_method from here on, as make_adapters may have blocked
1224 }
1225 
1226 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1227   // Adapters for compiled code are made eagerly here.  They are fairly
1228   // small (generally < 100 bytes) and quick to make (and cached and shared)
1229   // so making them eagerly shouldn't be too expensive.
1230   AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1231   if (adapter == NULL) {
1232     if (!is_init_completed()) {
1233       // Don't throw exceptions during VM initialization because java.lang.* classes
1234       // might not have been initialized, causing problems when constructing the
1235       // Java exception object.
1236       vm_exit_during_initialization("Out of space in CodeCache for adapters");
1237     } else {
1238       THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "Out of space in CodeCache for adapters");
1239     }
1240   }
1241 
1242   mh->set_adapter_entry(adapter);

1288   guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");
1289 
1290   // These writes must happen in this order, because the interpreter will
1291   // directly jump to from_interpreted_entry which jumps to an i2c adapter
1292   // which jumps to _from_compiled_entry.
1293   mh->_code = code;             // Assign before allowing compiled code to exec
1294 
1295   int comp_level = code->comp_level();
1296   // In theory there could be a race here. In practice it is unlikely
1297   // and not worth worrying about.
1298   if (comp_level > mh->highest_comp_level()) {
1299     mh->set_highest_comp_level(comp_level);
1300   }
1301 
1302   OrderAccess::storestore();
1303   mh->_from_compiled_entry = code->verified_entry_point();
1304   OrderAccess::storestore();
1305   // From this point on, compiled code can execute.
1306   if (!mh->is_method_handle_intrinsic())
1307     mh->_from_interpreted_entry = mh->get_i2c_entry();
1308 }
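// The storestore barriers above implement the classic publication pattern:
// initialize all payload fields first, then publish the pointer (or entry)
// that other threads load. A minimal stand-alone analogy in portable C++11
// atomics (a sketch of the idea only; HotSpot itself uses plain fields plus
// OrderAccess barriers, not std::atomic):
//
//   #include <atomic>
//
//   struct Payload { int a; int b; };
//   static Payload payload;
//   static std::atomic<Payload*> published{nullptr};
//
//   void writer() {
//     payload.a = 1;                 // initialize fields first...
//     payload.b = 2;
//     // ...then publish with release semantics (cf. storestore above)
//     published.store(&payload, std::memory_order_release);
//   }
//
//   void reader() {
//     Payload* p = published.load(std::memory_order_acquire);
//     if (p != nullptr) {
//       int sum = p->a + p->b;       // guaranteed to observe a == 1, b == 2
//       (void)sum;
//     }
//   }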
1309 
1310 
1311 bool Method::is_overridden_in(Klass* k) const {
1312   InstanceKlass* ik = InstanceKlass::cast(k);
1313 
1314   if (ik->is_interface()) return false;
1315 
1316   // If the method's holder is an interface, we skip it - except if it
1317   // is a miranda method
1318   if (method_holder()->is_interface()) {
1319     // Check that method is not a miranda method
1320     if (ik->lookup_method(name(), signature()) == NULL) {
1321       // No implementation exists - so it is a miranda method
1322       return false;
1323     }
1324     return true;
1325   }
1326 
1327   assert(ik->is_subclass_of(method_holder()), "should be subklass");

1707     }
1708   }
1709   return sig_is_loaded;
1710 }
1711 
1712 bool Method::has_unloaded_classes_in_signature(const methodHandle& m, TRAPS) {
1713   ResourceMark rm(THREAD);
1714   for (ResolvingSignatureStream ss(m()); !ss.is_done(); ss.next()) {
1715     if (ss.type() == T_OBJECT) {
1716       // Do not use ss.is_reference() here, since we don't care about
1717       // unloaded array component types.
1718       Klass* klass = ss.as_klass_if_loaded(THREAD);
1719       assert(!HAS_PENDING_EXCEPTION, "as_klass_if_loaded contract");
1720       if (klass == NULL) return true;
1721     }
1722   }
1723   return false;
1724 }
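// Illustrative example (hypothetical class name): for a method with
// signature (Lcom/example/Unloaded;I)V, the loop above returns true when
// com.example.Unloaded has not been loaded by this method's loader; the int
// parameter is skipped because primitive types are never T_OBJECT.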
1725 
1726 // Exposed so field engineers can debug the VM
1727 void Method::print_short_name(outputStream* st) {
1728   ResourceMark rm;
1729 #ifdef PRODUCT
1730   st->print(" %s::", method_holder()->external_name());
1731 #else
1732   st->print(" %s::", method_holder()->internal_name());
1733 #endif
1734   name()->print_symbol_on(st);
1735   if (WizardMode) signature()->print_symbol_on(st);
1736   else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))
1737     MethodHandles::print_as_basic_type_signature_on(st, signature());
1738 }
1739 
1740 // Comparator for sorting an object array containing
1741 // Method*s.
1742 static int method_comparator(Method* a, Method* b) {
1743   return a->name()->fast_compare(b->name());
1744 }
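// Note (an assumption worth checking against Symbol::fast_compare): this is
// a cheap identity-style comparison, not an alphabetical one; any consistent
// total order suffices here because later lookups use the same comparator.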
1745 
1746 // This is only done during class loading, so it is OK to assume that method_idnum matches the methods() array.
1747 // default_methods also uses this without the ordering, for fast find_method.

1749   int length = methods->length();
1750   if (length > 1) {
1751     if (func == NULL) {
1752       func = method_comparator;
1753     }
1754     {
1755       NoSafepointVerifier nsv;
1756       QuickSort::sort(methods->data(), length, func, /*idempotent=*/false);
1757     }
1758     // Reset method ordering
1759     if (set_idnums) {
1760       for (int i = 0; i < length; i++) {
1761         Method* m = methods->at(i);
1762         m->set_method_idnum(i);
1763         m->set_orig_method_idnum(i);
1764       }
1765     }
1766   }
1767 }
1768 
1769 //-----------------------------------------------------------------------------------
1770 // Non-product code unless JVM/TI needs it
1771 
1772 #if !defined(PRODUCT) || INCLUDE_JVMTI
1773 class SignatureTypePrinter : public SignatureTypeNames {
1774  private:
1775   outputStream* _st;
1776   bool _use_separator;
1777 
1778   void type_name(const char* name) {
1779     if (_use_separator) _st->print(", ");
1780     _st->print("%s", name);
1781     _use_separator = true;
1782   }
1783 
1784  public:
1785   SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
1786     _st = st;
1787     _use_separator = false;
1788   }
1789 
1790   void print_parameters()              { _use_separator = false; do_parameters_on(this); }
1791   void print_returntype()              { _use_separator = false; do_type(return_type()); }
1792 };
1793 
1794 
1795 void Method::print_name(outputStream* st) {
1796   Thread *thread = Thread::current();
1797   ResourceMark rm(thread);
1798   st->print("%s ", is_static() ? "static" : "virtual");
1799   if (WizardMode) {
1800     st->print("%s.", method_holder()->internal_name());
1801     name()->print_symbol_on(st);
1802     signature()->print_symbol_on(st);
1803   } else {
1804     SignatureTypePrinter sig(signature(), st);
1805     sig.print_returntype();
1806     st->print(" %s.", method_holder()->internal_name());
1807     name()->print_symbol_on(st);
1808     st->print("(");
1809     sig.print_parameters();
1810     st->print(")");
1811   }
1812 }
1813 #endif // !PRODUCT || INCLUDE_JVMTI
1814 
1815 

2260   // Method should otherwise be valid. Assert for testing.
2261   assert(is_valid_method(o), "should be valid jmethodid");
2262   // If the method's class holder object is unreferenced, but not yet marked as
2263   // unloaded, we need to return NULL here too because after a safepoint, its memory
2264   // will be reclaimed.
2265   return o->method_holder()->is_loader_alive() ? o : NULL;
2266 }
2267 
2268 void Method::set_on_stack(const bool value) {
2269   // Set both the method itself and its constant pool.  The constant pool
2270   // on stack means some method referring to it is also on the stack.
2271   constants()->set_on_stack(value);
2272 
2273   bool already_set = on_stack();
2274   _access_flags.set_on_stack(value);
2275   if (value && !already_set) {
2276     MetadataOnStackMark::record(this);
2277   }
2278 }
2279 
2280 // Called when the class loader is unloaded to make all methods weak.
2281 void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
2282   loader_data->jmethod_ids()->clear_all_methods();
2283 }
2284 
2285 bool Method::has_method_vptr(const void* ptr) {
2286   Method m;
2287   // This assumes that the vtbl pointer is the first word of a C++ object.
2288   return dereference_vptr(&m) == dereference_vptr(ptr);
2289 }
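// A stand-alone sketch of the vptr trick used above (assumes the common C++
// ABI where a polymorphic object's first word is its vtable pointer; this
// mirrors what dereference_vptr relies on, it is not HotSpot's definition):
//
//   static intptr_t read_first_word(const void* p) {
//     return *reinterpret_cast<const intptr_t*>(p);
//   }
//
// Two live objects of the same dynamic type then compare equal:
// read_first_word(obj_a) == read_first_word(obj_b).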
2290 
2291 // Check that this pointer is valid by checking that the vtbl pointer matches
2292 bool Method::is_valid_method(const Method* m) {
2293   if (m == NULL) {
2294     return false;
2295   } else if ((intptr_t(m) & (wordSize-1)) != 0) {
2296     // Quick sanity check on pointer.
2297     return false;
2298   } else if (m->is_shared()) {
2299     return CppVtables::is_valid_shared_method(m);

src/hotspot/share/oops/method.cpp (new version)

  86                          TRAPS) {
  87   assert(!access_flags.is_native() || byte_code_size == 0,
  88          "native methods should not contain byte codes");
  89   ConstMethod* cm = ConstMethod::allocate(loader_data,
  90                                           byte_code_size,
  91                                           sizes,
  92                                           method_type,
  93                                           CHECK_NULL);
  94   int size = Method::size(access_flags.is_native());
  95   return new (loader_data, size, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags);
  96 }
  97 
  98 Method::Method(ConstMethod* xconst, AccessFlags access_flags) {
  99   NoSafepointVerifier no_safepoint;
 100   set_constMethod(xconst);
 101   set_access_flags(access_flags);
 102   set_intrinsic_id(vmIntrinsics::_none);
 103   set_force_inline(false);
 104   set_hidden(false);
 105   set_dont_inline(false);
 106   set_changes_current_thread(false);
 107   set_has_injected_profile(false);
 108   set_method_data(NULL);
 109   clear_method_counters();
 110   set_vtable_index(Method::garbage_vtable_index);
 111 
 112   // Fix and bury in Method*
 113   set_interpreter_entry(NULL); // sets _i2i_entry and _from_interpreted_entry
 114   set_adapter_entry(NULL);
 115   Method::clear_code(); // from_c/from_i get set to c2i/i2i
 116 
 117   if (access_flags.is_native()) {
 118     clear_native_function();
 119     set_signature_handler(NULL);
 120   }
 121 
 122   NOT_PRODUCT(set_compiled_invocation_count(0);)
 123 }
 124 
 125 // Release Method*.  The nmethod will be gone when we get here because
 126 // we've walked the code cache.

 955     Thread *thread = Thread::current();
 956     Symbol* klass_name = constants()->klass_name_at(klass_index);
 957     Handle loader(thread, method_holder()->class_loader());
 958     Handle prot  (thread, method_holder()->protection_domain());
 959     return SystemDictionary::find_instance_klass(klass_name, loader, prot) != NULL;
 960   } else {
 961     return true;
 962   }
 963 }
 964 
 965 
 966 bool Method::is_klass_loaded(int refinfo_index, bool must_be_resolved) const {
 967   int klass_index = constants()->klass_ref_index_at(refinfo_index);
 968   if (must_be_resolved) {
 969     // Make sure the klass is resolved in the constant pool.
 970     if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
 971   }
 972   return is_klass_loaded_by_klass_index(klass_index);
 973 }
 974 
 975 void Method::set_native_function(address function, bool post_event_flag) {
 976   assert(function != NULL, "use clear_native_function to unregister natives");
 977   assert(!is_special_native_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
 978   address* native_function = native_function_addr();
 979 
 980   // We can see racers trying to install the same native function. Once
 981   // is plenty.
 982   address current = *native_function;
 983   if (current == function) return;
 984   if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
 985       function != NULL) {
 986     // native_method_throw_unsatisfied_link_error_entry() should only
 987     // be passed when post_event_flag is false.
 988     assert(function !=
 989       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
 990       "post_event_flag mismatch");
 991 
 992     // post the bind event, and possibly change the bind function
 993     JvmtiExport::post_native_method_bind(this, &function);
 994   }
 995   *native_function = function;
 996   // This function can be called more than once. We must make sure that we always
 997   // use the latest registered method, so check whether a stub has already been
 998   // generated. If so, we have to make it not_entrant.
 999   CompiledMethod* nm = code(); // Put it into local variable to guard against concurrent updates
1000   if (nm != NULL) {
1001     nm->make_not_entrant();
1002   }
1003 }
1004 
1005 
1006 bool Method::has_native_function() const {
1007   if (is_special_native_intrinsic())
1008     return false;  // special-cased in SharedRuntime::generate_native_wrapper
1009   address func = native_function();
1010   return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
1011 }
1012 
1013 
1014 void Method::clear_native_function() {
1015   // Note: is_special_native_intrinsic() is allowed here.
1016   set_native_function(
1017     SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1018     !native_bind_event_is_interesting);
1019   this->unlink_code();
1020 }
1021 
1022 
1023 void Method::set_signature_handler(address handler) {
1024   address* signature_handler = signature_handler_addr();
1025   *signature_handler = handler;
1026 }
1027 

1203   // Sets both _i2i_entry and _from_interpreted_entry
1204   set_interpreter_entry(entry);
1205 
1206   // Don't overwrite already registered native entries.
1207   if (is_native() && !has_native_function()) {
1208     set_native_function(
1209       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
1210       !native_bind_event_is_interesting);
1211   }
1212 
1213   // Set up the compiler entrypoint.  This is done eagerly, so we do not need
1214   // special handling of vtables.  An alternative is to make adapters lazily,
1215   // by calling make_adapters() from from_compiled_entry() for normal calls.
1216   // For vtable calls life gets more complicated.  When a call-site goes
1217   // mega-morphic we need adapters in all methods which can be called from the
1218   // vtable, including such methods that get loaded later.  Ditto for
1219   // mega-morphic itable calls.  If this proves to be a problem we'll make
1220   // these lazy later.
1221   (void) make_adapters(h_method, CHECK);
1222 
1223   set_num_stack_arg_slots();
1224 
1225   // ONLY USE the h_method from here on, as make_adapters may have blocked
1226 }
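// One reading of the entry-point wiring done above and in set_code (a
// simplified sketch, not normative):
//
//   interpreted caller
//     -> _from_interpreted_entry   (i2i entry until compiled code exists,
//                                   then the i2c adapter)
//          -> i2c adapter          (repacks args into the compiled convention)
//               -> _from_compiled_entry
//                    -> nmethod verified entry point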
1227 
1228 address Method::make_adapters(const methodHandle& mh, TRAPS) {
1229   // Adapters for compiled code are made eagerly here.  They are fairly
1230   // small (generally < 100 bytes) and quick to make (and cached and shared)
1231   // so making them eagerly shouldn't be too expensive.
1232   AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
1233   if (adapter == NULL) {
1234     if (!is_init_completed()) {
1235       // Don't throw exceptions during VM initialization because java.lang.* classes
1236       // might not have been initialized, causing problems when constructing the
1237       // Java exception object.
1238       vm_exit_during_initialization("Out of space in CodeCache for adapters");
1239     } else {
1240       THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "Out of space in CodeCache for adapters");
1241     }
1242   }
1243 
1244   mh->set_adapter_entry(adapter);

1290   guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");
1291 
1292   // These writes must happen in this order, because the interpreter will
1293   // directly jump to from_interpreted_entry which jumps to an i2c adapter
1294   // which jumps to _from_compiled_entry.
1295   mh->_code = code;             // Assign before allowing compiled code to exec
1296 
1297   int comp_level = code->comp_level();
1298   // In theory there could be a race here. In practice it is unlikely
1299   // and not worth worrying about.
1300   if (comp_level > mh->highest_comp_level()) {
1301     mh->set_highest_comp_level(comp_level);
1302   }
1303 
1304   OrderAccess::storestore();
1305   mh->_from_compiled_entry = code->verified_entry_point();
1306   OrderAccess::storestore();
1307   // From this point on, compiled code can execute.
1308   if (!mh->is_method_handle_intrinsic())
1309     mh->_from_interpreted_entry = mh->get_i2c_entry();
1310   if (mh->is_continuation_enter_intrinsic()) {
1311     mh->_i2i_entry = mh->get_i2c_entry(); // this is the entry used when we're in interpreter-only mode; see InterpreterMacroAssembler::jump_from_interpreted
1312     mh->_from_interpreted_entry = mh->get_i2c_entry();
1313   }
1314 }
1315 
1316 
1317 bool Method::is_overridden_in(Klass* k) const {
1318   InstanceKlass* ik = InstanceKlass::cast(k);
1319 
1320   if (ik->is_interface()) return false;
1321 
1322   // If the method's holder is an interface, we skip it - except if it
1323   // is a miranda method
1324   if (method_holder()->is_interface()) {
1325     // Check that method is not a miranda method
1326     if (ik->lookup_method(name(), signature()) == NULL) {
1327       // No implementation exists - so it is a miranda method
1328       return false;
1329     }
1330     return true;
1331   }
1332 
1333   assert(ik->is_subclass_of(method_holder()), "should be subklass");

1713     }
1714   }
1715   return sig_is_loaded;
1716 }
1717 
1718 bool Method::has_unloaded_classes_in_signature(const methodHandle& m, TRAPS) {
1719   ResourceMark rm(THREAD);
1720   for (ResolvingSignatureStream ss(m()); !ss.is_done(); ss.next()) {
1721     if (ss.type() == T_OBJECT) {
1722       // Do not use ss.is_reference() here, since we don't care about
1723       // unloaded array component types.
1724       Klass* klass = ss.as_klass_if_loaded(THREAD);
1725       assert(!HAS_PENDING_EXCEPTION, "as_klass_if_loaded contract");
1726       if (klass == NULL) return true;
1727     }
1728   }
1729   return false;
1730 }
1731 
1732 // Exposed so field engineers can debug the VM
1733 void Method::print_short_name(outputStream* st) const {
1734   ResourceMark rm;
1735 #ifdef PRODUCT
1736   st->print(" %s::", method_holder()->external_name());
1737 #else
1738   st->print(" %s::", method_holder()->internal_name());
1739 #endif
1740   name()->print_symbol_on(st);
1741   if (WizardMode) signature()->print_symbol_on(st);
1742   else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))
1743     MethodHandles::print_as_basic_type_signature_on(st, signature());
1744 }
1745 
1746 // Comparator for sorting an object array containing
1747 // Method*s.
1748 static int method_comparator(Method* a, Method* b) {
1749   return a->name()->fast_compare(b->name());
1750 }
1751 
1752 // This is only done during class loading, so it is OK to assume that method_idnum matches the methods() array.
1753 // default_methods also uses this without the ordering, for fast find_method.

1755   int length = methods->length();
1756   if (length > 1) {
1757     if (func == NULL) {
1758       func = method_comparator;
1759     }
1760     {
1761       NoSafepointVerifier nsv;
1762       QuickSort::sort(methods->data(), length, func, /*idempotent=*/false);
1763     }
1764     // Reset method ordering
1765     if (set_idnums) {
1766       for (int i = 0; i < length; i++) {
1767         Method* m = methods->at(i);
1768         m->set_method_idnum(i);
1769         m->set_orig_method_idnum(i);
1770       }
1771     }
1772   }
1773 }
1774 
1775 void Method::set_num_stack_arg_slots() {
1776   ResourceMark rm;
1777   int sizeargs = size_of_parameters();
1778   BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, sizeargs);
1779   VMRegPair* regs   = NEW_RESOURCE_ARRAY(VMRegPair, sizeargs);
1780 
1781   int sig_index = 0;
1782   if (!is_static()) sig_bt[sig_index++] = T_OBJECT; // 'this'
1783   for (SignatureStream ss(signature()); !ss.at_return_type(); ss.next()) {
1784     BasicType t = ss.type();
1785     assert(type2size[t] == 1 || type2size[t] == 2, "size is 1 or 2");
1786     sig_bt[sig_index++] = t;
1787     if (type2size[t] == 2) sig_bt[sig_index++] = T_VOID;
1788   }
1789   assert(sig_index == sizeargs, "");
1790 
1791   _num_stack_arg_slots = SharedRuntime::java_calling_convention(sig_bt, regs, sizeargs);
1792 }
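// Worked example of the slot layout built above (illustrative; the final
// count is platform-dependent because java_calling_convention assigns
// registers first): for an instance method void m(int i, long l, String s),
// sig_bt becomes
//
//   [ T_OBJECT /*this*/, T_INT, T_LONG, T_VOID /*high half*/, T_OBJECT ]
//
// so sizeargs == 5, and _num_stack_arg_slots is however many of those slots
// java_calling_convention could not place in registers.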
1793 
1794 //-----------------------------------------------------------------------------------
1795 // Non-product code unless JVM/TI needs it
1796 
1797 #if !defined(PRODUCT) || INCLUDE_JVMTI
1798 class SignatureTypePrinter : public SignatureTypeNames {
1799  private:
1800   outputStream* _st;
1801   bool _use_separator;
1802 
1803   void type_name(const char* name) {
1804     if (_use_separator) _st->print(", ");
1805     _st->print("%s", name);
1806     _use_separator = true;
1807   }
1808 
1809  public:
1810   SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
1811     _st = st;
1812     _use_separator = false;
1813   }
1814 
1815   void print_parameters()              { _use_separator = false; do_parameters_on(this); }
1816   void print_returntype()              { _use_separator = false; do_type(return_type()); }
1817 };
1818 
1819 
1820 void Method::print_name(outputStream* st) const {
1821   Thread *thread = Thread::current();
1822   ResourceMark rm(thread);
1823   st->print("%s ", is_static() ? "static" : "virtual");
1824   if (WizardMode) {
1825     st->print("%s.", method_holder()->internal_name());
1826     name()->print_symbol_on(st);
1827     signature()->print_symbol_on(st);
1828   } else {
1829     SignatureTypePrinter sig(signature(), st);
1830     sig.print_returntype();
1831     st->print(" %s.", method_holder()->internal_name());
1832     name()->print_symbol_on(st);
1833     st->print("(");
1834     sig.print_parameters();
1835     st->print(")");
1836   }
1837 }
1838 #endif // !PRODUCT || INCLUDE_JVMTI
1839 
1840 

2285   // Method should otherwise be valid. Assert for testing.
2286   assert(is_valid_method(o), "should be valid jmethodid");
2287   // If the method's class holder object is unreferenced, but not yet marked as
2288   // unloaded, we need to return NULL here too because after a safepoint, its memory
2289   // will be reclaimed.
2290   return o->method_holder()->is_loader_alive() ? o : NULL;
2291 }
2292 
2293 void Method::set_on_stack(const bool value) {
2294   // Set both the method itself and its constant pool.  The constant pool
2295   // on stack means some method referring to it is also on the stack.
2296   constants()->set_on_stack(value);
2297 
2298   bool already_set = on_stack();
2299   _access_flags.set_on_stack(value);
2300   if (value && !already_set) {
2301     MetadataOnStackMark::record(this);
2302   }
2303 }
2304 
2305 void Method::record_marking_cycle() {
2306   // If any method is on the stack in continuations, none of them can be reclaimed,
2307   // so save the marking cycle to check for the whole class in the cpCache.
2308   // The cpCache is writeable.
2309   constants()->cache()->record_marking_cycle();
2310 }
2311 
2312 // Called when the class loader is unloaded to make all methods weak.
2313 void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
2314   loader_data->jmethod_ids()->clear_all_methods();
2315 }
2316 
2317 bool Method::has_method_vptr(const void* ptr) {
2318   Method m;
2319   // This assumes that the vtbl pointer is the first word of a C++ object.
2320   return dereference_vptr(&m) == dereference_vptr(ptr);
2321 }
2322 
2323 // Check that this pointer is valid by checking that the vtbl pointer matches
2324 bool Method::is_valid_method(const Method* m) {
2325   if (m == NULL) {
2326     return false;
2327   } else if ((intptr_t(m) & (wordSize-1)) != 0) {
2328     // Quick sanity check on pointer.
2329     return false;
2330   } else if (m->is_shared()) {
2331     return CppVtables::is_valid_shared_method(m);