< prev index next > src/hotspot/share/code/nmethod.cpp
Print this page
#include "code/compiledIC.hpp"
#include "code/dependencies.hpp"
#include "code/nativeInst.hpp"
#include "code/nmethod.inline.hpp"
#include "code/scopeDesc.hpp"
+ #include "code/SCCache.hpp"
#include "compiler/abstractCompiler.hpp"
#include "compiler/compilationLog.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileLog.hpp"
#include "compiler/compileTask.hpp"
klass = ((Klass*)md);
} else if (md->is_method()) {
klass = ((Method*)md)->method_holder();
} else if (md->is_methodData()) {
klass = ((MethodData*)md)->method()->method_holder();
+ } else if (md->is_methodCounters()) {
+ klass = ((MethodCounters*)md)->method()->method_holder();
} else {
md->print();
ShouldNotReachHere();
}
assert(klass->is_loader_alive(), "must be alive");
nm->log_new_nmethod();
}
return nm;
}
+ // Register this nmethod with every class (or CallSite) it depends on, so
+ // dependency checking at class-load time only has to visit the nmethods
+ // hanging off the affected classes. Factored out so both compilation and
+ // cached-code (SCC) load paths can share it.
+ void nmethod::record_nmethod_dependency() {
+ // To make dependency checking during class loading fast, record
+ // the nmethod dependencies in the classes it is dependent on.
+ // This allows the dependency checking code to simply walk the
+ // class hierarchy above the loaded class, checking only nmethods
+ // which are dependent on those classes. The slow way is to
+ // check every nmethod for dependencies which makes it linear in
+ // the number of methods compiled. For applications with a lot
+ // of classes the slow way is too slow.
+ for (Dependencies::DepStream deps(this); deps.next(); ) {
+ if (deps.type() == Dependencies::call_site_target_value) {
+ // CallSite dependencies are managed on per-CallSite instance basis.
+ oop call_site = deps.argument_oop(0);
+ MethodHandles::add_dependent_nmethod(call_site, this);
+ } else {
+ InstanceKlass* ik = deps.context_type();
+ if (ik == nullptr) {
+ continue; // ignore things like evol_method
+ }
+ // record this nmethod as dependent on this klass
+ ik->add_dependent_nmethod(this);
+ }
+ }
+ }
+
nmethod* nmethod::new_nmethod(const methodHandle& method,
int compile_id,
int entry_bci,
CodeOffsets* offsets,
int orig_pc_offset,
OopMapSet* oop_maps,
ExceptionHandlerTable* handler_table,
ImplicitExceptionTable* nul_chk_table,
AbstractCompiler* compiler,
CompLevel comp_level
+ , SCCEntry* scc_entry
#if INCLUDE_JVMCI
, char* speculations,
int speculations_len,
JVMCINMethodData* jvmci_data
#endif
nm = new (nmethod_size, comp_level)
nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
debug_info, dependencies, code_buffer, frame_size, oop_maps,
- handler_table, nul_chk_table, compiler, comp_level
+ handler_table, nul_chk_table, compiler, comp_level, scc_entry
#if INCLUDE_JVMCI
, speculations,
speculations_len,
jvmci_data
#endif
);
if (nm != nullptr) {
- // To make dependency checking during class loading fast, record
- // the nmethod dependencies in the classes it is dependent on.
- // This allows the dependency checking code to simply walk the
- // class hierarchy above the loaded class, checking only nmethods
- // which are dependent on those classes. The slow way is to
- // check every nmethod for dependencies which makes it linear in
- // the number of methods compiled. For applications with a lot
- // classes the slow way is too slow.
- for (Dependencies::DepStream deps(nm); deps.next(); ) {
- if (deps.type() == Dependencies::call_site_target_value) {
- // CallSite dependencies are managed on per-CallSite instance basis.
- oop call_site = deps.argument_oop(0);
- MethodHandles::add_dependent_nmethod(call_site, nm);
- } else {
- InstanceKlass* ik = deps.context_type();
- if (ik == nullptr) {
- continue; // ignore things like evol_method
- }
- // record this nmethod as dependent on this klass
- ik->add_dependent_nmethod(nm);
- }
- }
- NOT_PRODUCT(if (nm != nullptr) note_java_nmethod(nm));
+ nm->record_nmethod_dependency();
+ NOT_PRODUCT(note_java_nmethod(nm));
}
}
// Do verification and logging outside CodeCache_lock.
if (nm != nullptr) {
+
+ #ifdef ASSERT
+ LogTarget(Debug, scc, nmethod) log;
+ if (log.is_enabled()) {
+ LogStream out(log);
+ out.print_cr("== new_nmethod 2");
+ FlagSetting fs(PrintRelocations, true);
+ nm->print_on_impl(&out);
+ nm->decode(&out);
+ }
+ #endif
+
+ // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
+ DEBUG_ONLY(nm->verify();)
+ nm->log_new_nmethod();
+ }
+ return nm;
+ }
+
+ // Reconstruct a live nmethod from an archived (cached-code) image:
+ // bulk-copy the archived bits over 'this' (freshly allocated CodeCache
+ // space), then re-attach all runtime-only state — mutable data, oop maps,
+ // immutable data, embedded oops/metadata, and immediate-value relocations.
+ // The oop_list/metadata_list and reloc_imm_* lists were recorded when the
+ // nmethod was stored (see create_reloc_immediates_list).
+ void nmethod::restore_from_archive(nmethod* archived_nm,
+ const methodHandle& method,
+ int compile_id,
+ address reloc_data,
+ GrowableArray<Handle>& oop_list,
+ GrowableArray<Metadata*>& metadata_list,
+ ImmutableOopMapSet* oop_maps,
+ address immutable_data,
+ GrowableArray<Handle>& reloc_imm_oop_list,
+ GrowableArray<Metadata*>& reloc_imm_metadata_list,
+ #ifndef PRODUCT
+ AsmRemarks& archived_asm_remarks,
+ DbgStrings& archived_dbg_strings,
+ #endif /* PRODUCT */
+ SCCReader* scc_reader)
+ {
+ // Raw copy of the archived image; the fields below are re-initialized
+ // with live values afterwards.
+ archived_nm->copy_to((address)this);
+ set_name("nmethod");
+ set_method(method());
+
+ _compile_id = compile_id;
+ // Allocate _mutable_data before copying relocation data because relocation data is now stored as part of the mutable data area.
+ if (archived_nm->mutable_data_size() > 0) {
+ _mutable_data = (address)os::malloc(archived_nm->mutable_data_size(), mtCode);
+ if (_mutable_data == nullptr) {
+ vm_exit_out_of_memory(archived_nm->mutable_data_size(), OOM_MALLOC_ERROR, "codebuffer: no space for mutable data");
+ }
+ }
+ memcpy((address)relocation_begin(), reloc_data, archived_nm->relocation_size());
+ set_oop_maps(oop_maps);
+ set_immutable_data(immutable_data);
+ // Install the recorded oops and metadata into this nmethod's data sections.
+ copy_values(&oop_list);
+ copy_values(&metadata_list);
+
+ // Re-apply relocations for the immediate oops/metadata recorded at store time.
+ scc_reader->apply_relocations(this, reloc_imm_oop_list, reloc_imm_metadata_list);
+
+ #ifndef PRODUCT
+ // Take ownership of the archived assembly remarks and debug strings.
+ AsmRemarks::init(asm_remarks());
+ use_remarks(archived_asm_remarks);
+ archived_asm_remarks.clear();
+ DbgStrings::init(dbg_strings());
+ use_strings(archived_dbg_strings);
+ archived_dbg_strings.clear();
+ #endif /* PRODUCT */
+
+ // Flush the instruction cache for the copied code range.
+ ICache::invalidate_range(code_begin(), code_size());
+
+ // Create the PcDesc cache after the PcDesc data is copied - it will be used to initialize the cache.
+ _pc_desc_container = new PcDescContainer(scopes_pcs_begin());
+
+ set_scc_entry(scc_reader->scc_entry());
+
+ post_init();
+ }
+
+ nmethod* nmethod::new_nmethod(nmethod* archived_nm,
+ const methodHandle& method,
+ AbstractCompiler* compiler,
+ int compile_id,
+ address reloc_data,
+ GrowableArray<Handle>& oop_list,
+ GrowableArray<Metadata*>& metadata_list,
+ ImmutableOopMapSet* oop_maps,
+ address immutable_data,
+ GrowableArray<Handle>& reloc_imm_oop_list,
+ GrowableArray<Metadata*>& reloc_imm_metadata_list,
+ #ifndef PRODUCT
+ AsmRemarks& asm_remarks,
+ DbgStrings& dbg_strings,
+ #endif /* PRODUCT */
+ SCCReader* scc_reader)
+ {
+ nmethod* nm = nullptr;
+ int nmethod_size = archived_nm->size();
+ // create nmethod
+ {
+ MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
+ nm = (nmethod *)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
+ if (nm != nullptr) {
+ nm->restore_from_archive(archived_nm,
+ method,
+ compile_id,
+ reloc_data,
+ oop_list,
+ metadata_list,
+ oop_maps,
+ immutable_data,
+ reloc_imm_oop_list,
+ reloc_imm_metadata_list,
+ NOT_PRODUCT_ARG(asm_remarks)
+ NOT_PRODUCT_ARG(dbg_strings)
+ scc_reader);
+ nm->record_nmethod_dependency();
+ NOT_PRODUCT(note_java_nmethod(nm));
+ }
+ }
+ // Do verification and logging outside CodeCache_lock.
+ if (nm != nullptr) {
+ #ifdef ASSERT
+ LogTarget(Debug, scc, nmethod) log;
+ if (log.is_enabled()) {
+ LogStream out(log);
+ out.print_cr("== new_nmethod 2");
+ FlagSetting fs(PrintRelocations, true);
+ nm->print_on_impl(&out);
+ nm->decode(&out);
+ }
+ #endif
// Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
DEBUG_ONLY(nm->verify();)
nm->log_new_nmethod();
}
return nm;
_has_monitors = 0;
_has_scoped_access = 0;
_has_flushed_dependencies = 0;
_is_unlinked = 0;
_load_reported = 0; // jvmti state
+ _preloaded = 0;
+ _has_clinit_barriers = 0;
+ _used = false;
_deoptimization_status = not_marked;
// SECT_CONSTS is first in code buffer so the offset should be 0.
int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
assert(consts_offset == 0, "const_offset: %d", consts_offset);
}
// Native wrappers do not have deopt handlers. Make the values
// something that will never match a pc like the nmethod vtable entry
_deopt_handler_offset = 0;
_deopt_mh_handler_offset = 0;
+ _scc_entry = nullptr;
+ _method_profiling_count = 0;
_unwind_handler_offset = 0;
CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
int metadata_size = align_up(code_buffer->total_metadata_size(), wordSize);
JVMCI_ONLY( _jvmci_data_size = 0; )
print(); // print the header part only.
}
#if defined(SUPPORT_DATA_STRUCTS)
if (AbstractDisassembler::show_structs()) {
if (PrintRelocations) {
- print_relocations();
+ print_relocations_on(tty);
tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
}
}
#endif
if (xtty != nullptr) {
OopMapSet* oop_maps,
ExceptionHandlerTable* handler_table,
ImplicitExceptionTable* nul_chk_table,
AbstractCompiler* compiler,
CompLevel comp_level
+ , SCCEntry* scc_entry
#if INCLUDE_JVMCI
, char* speculations,
int speculations_len,
JVMCINMethodData* jvmci_data
#endif
{
debug_only(NoSafepointVerifier nsv;)
assert_locked_or_safepoint(CodeCache_lock);
init_defaults(code_buffer, offsets);
+ _scc_entry = scc_entry;
+ _method_profiling_count = 0;
_osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
_entry_bci = entry_bci;
_compile_id = compile_id;
_comp_level = comp_level;
}
// Print a short set of xml attributes to identify this nmethod. The
// output should be embedded in some other element.
void nmethod::log_identity(xmlStream* log) const {
- log->print(" compile_id='%d'", compile_id());
+ assert(log->inside_attrs_or_error(), "printing attributes");
+ log->print(" code_compile_id='%d'", compile_id());
const char* nm_kind = compile_kind();
- if (nm_kind != nullptr) log->print(" compile_kind='%s'", nm_kind);
- log->print(" compiler='%s'", compiler_name());
+ if (nm_kind != nullptr) log->print(" code_compile_kind='%s'", nm_kind);
+ log->print(" code_compiler='%s'", compiler_name());
if (TieredCompilation) {
- log->print(" level='%d'", comp_level());
+ log->print(" code_compile_level='%d'", comp_level());
}
#if INCLUDE_JVMCI
if (jvmci_nmethod_data() != nullptr) {
const char* jvmci_name = jvmci_nmethod_data()->name();
if (jvmci_name != nullptr) {
if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
print_scopes();
tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
}
if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
- print_relocations();
+ print_relocations_on(tty);
tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
}
if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
print_dependencies_on(tty);
tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
} else {
*dest = JNIHandles::resolve_non_null(handle);
}
}
+ // Copy the oops referenced by the given Handles into this nmethod's oops
+ // section. Handle-based counterpart of copy_values(GrowableArray<jobject>*)
+ // below, used on the cached-code restore path where oops are kept alive in
+ // Handles rather than JNI handles.
+ void nmethod::copy_values(GrowableArray<Handle>* array) {
+ int length = array->length();
+ assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
+ oop* dest = oops_begin();
+ for (int index = 0 ; index < length; index++) {
+ dest[index] = array->at(index)();
+ }
+ }
// Have to have the same name because it's called by a template
void nmethod::copy_values(GrowableArray<jobject>* array) {
int length = array->length();
assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
reloc->fix_metadata_relocation();
}
}
}
+ // Walk this nmethod's relocations and collect every immediate oop (wrapped
+ // in a Handle tied to 'thread' so it stays live across safepoints) and every
+ // immediate Metadata* into the given lists. NOTE(review): presumably used
+ // when storing the nmethod into the cached-code archive, matching the lists
+ // consumed by restore_from_archive - confirm against SCCache callers.
+ void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
+ RelocIterator iter(this);
+ while (iter.next()) {
+ if (iter.type() == relocInfo::oop_type) {
+ oop_Relocation* reloc = iter.oop_reloc();
+ if (reloc->oop_is_immediate()) {
+ oop dest = reloc->oop_value();
+ Handle h(thread, dest);
+ oop_list.append(h);
+ }
+ } else if (iter.type() == relocInfo::metadata_type) {
+ metadata_Relocation* reloc = iter.metadata_reloc();
+ if (reloc->metadata_is_immediate()) {
+ Metadata* m = reloc->metadata_value();
+ metadata_list.append(m);
+ }
+ }
+ }
+ }
+
static void install_post_call_nop_displacement(nmethod* nm, address pc) {
NativePostCallNop* nop = nativePostCallNop_at((address) pc);
intptr_t cbaddr = (intptr_t) nm;
intptr_t offset = ((intptr_t) pc) - cbaddr;
if (mdo == nullptr) return;
// There is a benign race here. See comments in methodData.hpp.
mdo->inc_decompile_count();
}
+ // Atomically bump the per-nmethod profiling counter; safe to call from
+ // multiple threads concurrently.
+ void nmethod::inc_method_profiling_count() {
+ Atomic::inc(&_method_profiling_count);
+ }
+
+ // Plain (non-atomic) read of the profiling counter; concurrent callers of
+ // inc_method_profiling_count() may make the returned value slightly stale.
+ uint64_t nmethod::method_profiling_count() {
+ return _method_profiling_count;
+ }
+
bool nmethod::try_transition(signed char new_state_int) {
signed char new_state = new_state_int;
assert_lock_strong(NMethodState_lock);
signed char old_state = _state;
if (old_state >= new_state) {
method()->unlink_code(this);
}
}
// Invalidate code
- bool nmethod::make_not_entrant(const char* reason) {
+ bool nmethod::make_not_entrant(const char* reason, bool make_not_entrant) {
assert(reason != nullptr, "Must provide a reason");
// This can be called while the system is already at a safepoint which is ok
NoSafepointVerifier nsv;
log_state_change(reason);
// Remove nmethod from method.
unlink_from_method();
+ if (make_not_entrant) {
+ // Keep cached code if it was simply replaced
+ // otherwise make it not entrant too.
+ SCCache::invalidate(_scc_entry);
+ }
+
+ CompileBroker::log_not_entrant(this);
} // leave critical region under NMethodState_lock
#if INCLUDE_JVMCI
// Invalidate can't occur while holding the NMethodState_lock
JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
ec = next;
}
if (_pc_desc_container != nullptr) {
delete _pc_desc_container;
}
- delete[] _compiled_ic_data;
+ if (_compiled_ic_data != nullptr) {
+ delete[] _compiled_ic_data;
+ }
- if (_immutable_data != blob_end()) {
+ if (_immutable_data != data_end() && !SCCache::is_address_in_aot_cache((address)_oop_maps)) {
os::free(_immutable_data);
_immutable_data = blob_end(); // Valid not null address
}
if (unregister_nmethod) {
Universe::heap()->unregister_nmethod(this);
task->mark_success();
task->set_nm_content_size(content_size());
task->set_nm_insts_size(insts_size());
task->set_nm_total_size(total_size());
+ // task->is_scc() is true only for loaded cached code.
+ // nmethod::_scc_entry is set for loaded and stored cached code
+ // to invalidate the entry when nmethod is deoptimized.
+ // There is an option to not store cached code in the archive.
+ guarantee((_scc_entry != nullptr) || !task->is_scc() || VerifyCachedCode, "sanity");
+
// JVMTI -- compiled method notification (must be done outside lock)
post_compiled_method_load_event();
if (CompilationLog::log() != nullptr) {
CompilationLog::log()->log_nmethod(JavaThread::current(), this);
nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
if (nm != this) {
fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
}
- for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
- if (! p->verify(this)) {
- tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
+ // Verification can be triggered during shutdown after SCCache is closed.
+ // If the Scopes data is in the AOT code cache, then we should avoid verification during shutdown.
+ if (!is_scc() || SCCache::is_on()) {
+ for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
+ if (! p->verify(this)) {
+ tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
+ }
}
- }
#ifdef ASSERT
#if INCLUDE_JVMCI
- {
- // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
- ImmutableOopMapSet* oms = oop_maps();
- ImplicitExceptionTable implicit_table(this);
- for (uint i = 0; i < implicit_table.len(); i++) {
- int exec_offset = (int) implicit_table.get_exec_offset(i);
- if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
- assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
- bool found = false;
- for (int i = 0, imax = oms->count(); i < imax; i++) {
- if (oms->pair_at(i)->pc_offset() == exec_offset) {
- found = true;
- break;
+ {
+ // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
+ ImmutableOopMapSet* oms = oop_maps();
+ ImplicitExceptionTable implicit_table(this);
+ for (uint i = 0; i < implicit_table.len(); i++) {
+ int exec_offset = (int) implicit_table.get_exec_offset(i);
+ if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
+ assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
+ bool found = false;
+ for (int i = 0, imax = oms->count(); i < imax; i++) {
+ if (oms->pair_at(i)->pc_offset() == exec_offset) {
+ found = true;
+ break;
+ }
}
+ assert(found, "missing oopmap");
}
- assert(found, "missing oopmap");
}
}
- }
#endif
#endif
+ }
VerifyOopsClosure voc(this);
oops_do(&voc);
assert(voc.ok(), "embedded oops must be OK");
Universe::heap()->verify_nmethod(this);
assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
nm->method()->external_name(), p2i(_oops_do_mark_link));
- verify_scopes();
+ if (!is_scc() || SCCache::is_on()) {
+ verify_scopes();
+ }
CompiledICLocker nm_verify(this);
VerifyMetadataClosure vmc;
metadata_do(&vmc);
}
if (speculations_size () > 0) st->print_cr(" speculations [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
p2i(speculations_begin()),
p2i(speculations_end()),
speculations_size());
#endif
+ if (SCCache::is_on() && _scc_entry != nullptr) {
+ _scc_entry->print(st);
+ }
}
void nmethod::print_code() {
ResourceMark m;
ttyLocker ttyl;
}
}
#endif
#ifndef PRODUCT // RelocIterator does support printing only then.
+ // Print all relocation entries of this nmethod to the given stream
+ // (renamed from print_relocations(), which always wrote to tty).
- void nmethod::print_relocations() {
+ void nmethod::print_relocations_on(outputStream* st) {
ResourceMark m; // in case methods get printed via the debugger
- tty->print_cr("relocations:");
+ st->print_cr("relocations:");
RelocIterator iter(this);
- iter.print();
+ iter.print_on(st);
}
#endif
void nmethod::print_pcs_on(outputStream* st) {
ResourceMark m; // in case methods get printed via debugger
return jvmci_nmethod_data()->name();
}
return nullptr;
}
#endif
+
+ // Scrub runtime-only state before this nmethod's image is written to the
+ // archive: counters/epochs are zeroed and pointers into the live VM
+ // (method, immutable data, IC data, GC data, ...) are cleared so no stale
+ // addresses are persisted. restore_from_archive() re-establishes the live
+ // state when the image is loaded back.
+ void nmethod::prepare_for_archiving() {
+ CodeBlob::prepare_for_archiving();
+ _deoptimization_generation = 0;
+ _gc_epoch = 0;
+ _method_profiling_count = 0;
+ _osr_link = nullptr;
+ _method = nullptr;
+ _immutable_data = nullptr;
+ _pc_desc_container = nullptr;
+ _exception_cache = nullptr;
+ _gc_data = nullptr;
+ _oops_do_mark_link = nullptr;
+ _compiled_ic_data = nullptr;
+ _osr_entry_point = nullptr;
+ _compile_id = -1;
+ _deoptimization_status = not_marked;
+ _is_unloading_state = 0;
+ // Archived nmethods are recorded as not yet installed.
+ _state = not_installed;
+ }
< prev index next >