< prev index next > src/hotspot/share/oops/method.cpp
Print this page
#include "oops/methodData.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
+ #include "oops/trainingData.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/atomic.hpp"
#include "runtime/arguments.hpp"
#include "runtime/continuationEntry.hpp"
// Compose "<klass external name>.<method name><signature>" into a freshly
// allocated, NUL-terminated resource-area string and return it.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
  const char* klass_name = klass->external_name();
  int klass_name_len = (int)strlen(klass_name);
  int method_name_len = method_name->utf8_length();
  // One extra char for the '.' separator between klass and method name.
  int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
  char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
  char* cursor = dest;
  strcpy(cursor, klass_name);
  cursor += klass_name_len;
  *cursor++ = '.';
  strcpy(cursor, method_name->as_C_string());
  cursor += method_name_len;
  strcpy(cursor, signature->as_C_string());
  dest[len] = 0;
  return dest;
}
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
// Compose "<klass external name>::<method name><signature>" into a freshly
// allocated, NUL-terminated resource-area string and return it.
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
  const char* klass_name = klass->external_name();
  int klass_name_len = (int)strlen(klass_name);
  int method_name_len = method_name->utf8_length();
  // Two extra chars for the "::" separator between klass and method name.
  int len = klass_name_len + 2 + method_name_len + signature->utf8_length();
  char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
  char* cursor = dest;
  strcpy(cursor, klass_name);
  cursor += klass_name_len;
  *cursor++ = ':';
  *cursor++ = ':';
  strcpy(cursor, method_name->as_C_string());
  cursor += method_name_len;
  strcpy(cursor, signature->as_C_string());
  dest[len] = 0;
  return dest;
}
char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
// Returns the name Symbol of this method's holder class.
Symbol* Method::klass_name() const {
return method_holder()->name();
}
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
! log_trace(cds)("Iter(Method): %p", this);
!
! if (!method_holder()->is_rewritten()) {
it->push(&_constMethod, MetaspaceClosure::_writable);
} else {
it->push(&_constMethod);
}
it->push(&_method_data);
// Returns the name Symbol of this method's holder class.
Symbol* Method::klass_name() const {
return method_holder()->name();
}
void Method::metaspace_pointers_do(MetaspaceClosure* it) {
! LogStreamHandle(Trace, cds) lsh;
! if (lsh.is_enabled()) {
! lsh.print("Iter(Method): %p ", this);
+ print_external_name(&lsh);
+ lsh.cr();
+ }
+ if (method_holder() != nullptr && !method_holder()->is_rewritten()) {
+ // holder is null for MH intrinsic methods
it->push(&_constMethod, MetaspaceClosure::_writable);
} else {
it->push(&_constMethod);
}
it->push(&_method_data);
// where they should point in a new JVM. Further initialize some
// entries now in order allow them to be write protected later.
void Method::remove_unshareable_info() {
unlink_method();
+ if (method_data() != nullptr) {
+ method_data()->remove_unshareable_info();
+ }
+ if (method_counters() != nullptr) {
+ method_counters()->remove_unshareable_info();
+ }
JFR_ONLY(REMOVE_METHOD_ID(this);)
}
// Restore, after loading from the archive, the state that was stripped by
// remove_unshareable_info(); forwards to the MethodData and MethodCounters
// when present, propagating any pending exception via CHECK.
void Method::restore_unshareable_info(TRAPS) {
  assert(is_method() && is_valid_method(this), "ensure C++ vtable is restored");
  if (method_data() != nullptr) {
    method_data()->restore_unshareable_info(CHECK);
  }
  if (method_counters() != nullptr) {
    method_counters()->restore_unshareable_info(CHECK);
  }
  assert(!queued_for_compilation(), "method's queued_for_compilation flag should not be set");
  // Fixed assert-message typo: the flag is named pending_queue_processed.
  assert(!pending_queue_processed(), "method's pending_queue_processed flag should not be set");
}
#endif
void Method::set_vtable_index(int index) {
if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
st->print_cr (" compiled_invocation_count: " INT64_FORMAT_W(11), compiled_invocation_count());
}
#endif
}
+ MethodTrainingData* Method::training_data_or_null() const {
+ MethodCounters* mcs = method_counters();
+ if (mcs == nullptr) {
+ return nullptr;
+ } else {
+ return mcs->method_training_data();
+ }
+ }
+
+ bool Method::init_training_data(MethodTrainingData* tdata) {
+ MethodCounters* mcs = method_counters();
+ if (mcs == nullptr) {
+ return false;
+ } else {
+ return mcs->init_method_training_data(tdata);
+ }
+ }
+
+ bool Method::install_training_method_data(const methodHandle& method) {
+ MethodTrainingData* mtd = MethodTrainingData::find(method);
+ if (mtd != nullptr && mtd->has_holder() && mtd->final_profile() != nullptr &&
+ mtd->holder() == method() && mtd->final_profile()->method() == method()) { // FIXME
+ Atomic::replace_if_null(&method->_method_data, mtd->final_profile());
+ return true;
+ }
+ return false;
+ }
+
// Build a MethodData* object to hold profiling information collected on this
// method when requested.
void Method::build_profiling_method_data(const methodHandle& method, TRAPS) {
+ if (install_training_method_data(method)) {
+ return;
+ }
// Do not profile the method if metaspace has hit an OOM previously
// allocating profiling data. Callers clear pending exception so don't
// add one here.
if (ClassLoaderDataGraph::has_metaspace_oom()) {
return;
if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
MetadataFactory::free_metadata(loader_data, method_data);
return;
}
! if (PrintMethodData && (Verbose || WizardMode)) {
ResourceMark rm(THREAD);
tty->print("build_profiling_method_data for ");
method->print_name(tty);
tty->cr();
// At the end of the run, the MDO, full of data, will be dumped.
if (!Atomic::replace_if_null(&method->_method_data, method_data)) {
MetadataFactory::free_metadata(loader_data, method_data);
return;
}
! /*
+ LogStreamHandle(Info, mdo) lsh;
+ if (lsh.is_enabled()) {
+ ResourceMark rm(THREAD);
+ lsh.print("build_profiling_method_data for ");
+ method->print_name(&lsh);
+ lsh.cr();
+ }
+ */
+ if (ForceProfiling && TrainingData::need_data()) {
+ MethodTrainingData* mtd = MethodTrainingData::make(method, false);
+ guarantee(mtd != nullptr, "");
+ }
+ if (PrintMethodData) {
ResourceMark rm(THREAD);
tty->print("build_profiling_method_data for ");
method->print_name(tty);
tty->cr();
// At the end of the run, the MDO, full of data, will be dumped.
if (!mh->init_method_counters(counters)) {
MetadataFactory::free_metadata(mh->method_holder()->class_loader_data(), counters);
}
+ if (ForceProfiling && TrainingData::need_data()) {
+ MethodTrainingData* mtd = MethodTrainingData::make(mh, false);
+ guarantee(mtd != nullptr, "");
+ }
+
return mh->method_counters();
}
bool Method::init_method_counters(MethodCounters* counters) {
// Try to install a pointer to MethodCounters, return true on success.
// A clinit barrier is needed only for static methods whose holder class
// has not yet completed initialization.
bool Method::needs_clinit_barrier() const {
  if (!is_static()) {
    return false;
  }
  return !method_holder()->is_initialized();
}
+ bool Method::code_has_clinit_barriers() const {
+ nmethod* nm = code();
+ return (nm != nullptr) && nm->has_clinit_barriers();
+ }
+
// Recognizes java.lang.Object::wait0 by comparing the method name and the
// holder's name against the interned vmSymbols.
bool Method::is_object_wait0() const {
  return name() == vmSymbols::wait_name()
      && klass_name() == vmSymbols::java_lang_Object();
}
}
NOT_PRODUCT(set_compiled_invocation_count(0);)
clear_method_data();
clear_method_counters();
+ clear_is_not_c1_compilable();
+ clear_is_not_c1_osr_compilable();
+ clear_is_not_c2_compilable();
+ clear_is_not_c2_osr_compilable();
+ clear_queued_for_compilation();
+ set_pending_queue_processed(false);
remove_unshareable_flags();
}
// Resets per-method flags that must not appear in the archived (CDS) copy
// of this Method. Obsolete/deleted methods are never archived, hence the
// leading asserts.
void Method::remove_unshareable_flags() {
// clear all the flags that shouldn't be in the archived version
assert(!is_obsolete(), "must be");
assert(!is_deleted(), "must be");
set_is_prefixed_native(false);
set_queued_for_compilation(false);
+ set_pending_queue_processed(false);
set_is_not_c2_compilable(false);
set_is_not_c1_compilable(false);
set_is_not_c2_osr_compilable(false);
// NOTE(review): the other unshareable-state path also clears
// is_not_c1_osr_compilable, which is not reset here — confirm whether
// that omission is intentional.
set_on_stack_flag(false);
+ set_has_upcall_on_method_entry(false);
+ set_has_upcall_on_method_exit(false);
}
#endif
// Called when the method_holder is getting linked. Setup entrypoints so the method
// is ready to be called from interpreter, compiler, and vtables.
THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
}
assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
}
}
}
address Method::make_adapters(const methodHandle& mh, TRAPS) {
! PerfTraceTime timer(ClassLoader::perf_method_adapters_time());
// Adapters for compiled code are made eagerly here. They are fairly
// small (generally < 100 bytes) and quick to make (and cached and shared)
// so making them eagerly shouldn't be too expensive.
AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Initial size of CodeCache is too small");
}
assert(_from_interpreted_entry == get_i2c_entry(), "invariant");
}
}
+ if (_preload_code != nullptr) {
+ MutexLocker ml(NMethodState_lock, Mutex::_no_safepoint_check_flag);
+ set_code(h_method, _preload_code);
+ assert(((nmethod*)_preload_code)->scc_entry() == _scc_entry, "sanity");
+ }
}
address Method::make_adapters(const methodHandle& mh, TRAPS) {
! PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());
// Adapters for compiled code are made eagerly here. They are fairly
// small (generally < 100 bytes) and quick to make (and cached and shared)
// so making them eagerly shouldn't be too expensive.
AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
}
}
// Highest compilation level recorded for this method by its
// MethodCounters; CompLevel_none when no counters have been allocated.
int Method::highest_comp_level() const {
  const MethodCounters* mcs = method_counters();
  return (mcs != nullptr) ? mcs->highest_comp_level() : CompLevel_none;
}
}
}
int Method::highest_comp_level() const {
const MethodCounters* mcs = method_counters();
+ nmethod* nm = code();
+ int level = (nm != nullptr) ? nm->comp_level() : CompLevel_none;
if (mcs != nullptr) {
! return MAX2(mcs->highest_comp_level(), level);
} else {
return CompLevel_none;
}
}
< prev index next >