src/hotspot/share/c1/c1_LIRGenerator.cpp
#include "c1/c1_ValueStack.hpp"
#include "ci/ciArrayKlass.hpp"
#include "ci/ciInstance.hpp"
#include "ci/ciObjArray.hpp"
#include "ci/ciUtilities.hpp"
+ #include "code/aotCodeCache.hpp"
#include "compiler/compilerDefinitions.inline.hpp"
#include "compiler/compilerOracle.hpp"
#include "gc/shared/barrierSet.hpp"
#include "gc/shared/c1/barrierSetC1.hpp"
#include "oops/klass.inline.hpp"
#include "oops/methodCounters.hpp"
+ #include "runtime/runtimeUpcalls.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/vm_version.hpp"
#include "utilities/bitMap.inline.hpp"
#include "utilities/macros.hpp"
klass2reg_with_patching(klass_reg, klass, info, is_unresolved);
// If klass is not loaded we do not know if the klass has finalizers:
if (UseFastNewInstance && klass->is_loaded()
&& !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {
! C1StubId stub_id = klass->is_initialized() ? C1StubId::fast_new_instance_id : C1StubId::fast_new_instance_init_check_id;
CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, stub_id);
assert(klass->is_loaded(), "must be loaded");
// allocate space for instance
assert(klass->size_helper() > 0, "illegal instance size");
const int instance_size = align_object_size(klass->size_helper());
__ allocate_object(dst, scratch1, scratch2, scratch3, scratch4,
! oopDesc::header_size(), instance_size, klass_reg, !klass->is_initialized(), slow_path);
} else {
CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, C1StubId::new_instance_id);
__ branch(lir_cond_always, slow_path);
__ branch_destination(slow_path->continuation());
}
klass2reg_with_patching(klass_reg, klass, info, is_unresolved);
// If klass is not loaded we do not know if the klass has finalizers:
if (UseFastNewInstance && klass->is_loaded()
&& !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {
! bool known_initialized = klass->is_initialized() && !compilation()->env()->is_precompile();
+ C1StubId stub_id = known_initialized ? C1StubId::fast_new_instance_id : C1StubId::fast_new_instance_init_check_id;
CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, stub_id);
assert(klass->is_loaded(), "must be loaded");
// allocate space for instance
assert(klass->size_helper() > 0, "illegal instance size");
const int instance_size = align_object_size(klass->size_helper());
__ allocate_object(dst, scratch1, scratch2, scratch3, scratch4,
! oopDesc::header_size(), instance_size, klass_reg, !known_initialized, slow_path);
} else {
CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, C1StubId::new_instance_id);
__ branch(lir_cond_always, slow_path);
__ branch_destination(slow_path->continuation());
}
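// Sketch of the reasoning behind known_initialized above (an inference, not stated
// verbatim in the change): during precompilation (compilation()->env()->is_precompile())
// the class's initialization state observed at compile time need not hold when the
// AOT-compiled code later runs, so the init-checking stub
// (C1StubId::fast_new_instance_init_check_id) is selected and allocate_object() is
// asked to emit the initialization check (!known_initialized).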
__ metadata2reg(method()->constant_encoding(), meth);
args->append(meth);
call_runtime(&signature, args, CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), voidType, nullptr);
}
+ MethodDetails method_details(method());
+ RuntimeUpcallInfo* upcall = RuntimeUpcalls::get_first_upcall(RuntimeUpcallType::onMethodEntry, method_details);
+ while (upcall != nullptr) {
+ BasicTypeList signature;
+ signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT)); // thread
+ LIR_OprList* args = new LIR_OprList();
+ args->append(getThreadPointer());
+ call_runtime(&signature, args, upcall->upcall_address(), voidType, nullptr);
+ upcall = RuntimeUpcalls::get_next_upcall(RuntimeUpcallType::onMethodEntry, method_details, upcall);
+ }
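// A minimal sketch (hypothetical, inferred only from the call_runtime() arguments in
// the loop above: a single thread pointer argument and a void return) of what an
// onMethodEntry upcall target could look like. The function name and parameter type
// are assumptions, not part of the RuntimeUpcalls API shown here.
//
//   static void example_on_method_entry(JavaThread* current) {
//     // e.g. bump a per-thread counter or record training data for the entered method
//   }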
+
if (method()->is_synchronized()) {
LIR_Opr obj;
if (method()->is_static()) {
obj = new_register(T_OBJECT);
__ oop2reg(method()->holder()->java_mirror()->constant_encoding(), obj);
}
void LIRGenerator::increment_event_counter_impl(CodeEmitInfo* info,
ciMethod *method, LIR_Opr step, int frequency,
int bci, bool backedge, bool notify) {
+ if (PreloadOnly) {
+ // Nothing to do if we only use preload code.
+ return;
+ }
assert(frequency == 0 || is_power_of_2(frequency + 1), "Frequency must be x^2 - 1 or 0");
int level = _compilation->env()->comp_level();
assert(level > CompLevel_simple, "Shouldn't be here");
int offset = -1;
MethodCounters* counters_adr = method->ensure_method_counters();
if (counters_adr == nullptr) {
bailout("method counters allocation failed");
return;
}
! counter_holder = new_pointer_register();
! __ move(LIR_OprFact::intptrConst(counters_adr), counter_holder);
offset = in_bytes(backedge ? MethodCounters::backedge_counter_offset() :
MethodCounters::invocation_counter_offset());
} else if (level == CompLevel_full_profile) {
counter_holder = new_register(T_METADATA);
offset = in_bytes(backedge ? MethodData::backedge_counter_offset() :
MethodCounters* counters_adr = method->ensure_method_counters();
if (counters_adr == nullptr) {
bailout("method counters allocation failed");
return;
}
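// Note on the branch below (a hedged inference): when the AOT code cache is in use,
// embedding the raw MethodCounters* as an immediate (intptrConst) would bake a
// process-specific address into code that may be stored and reused in a later run,
// so a relocatable metadata reference (metadata2reg) is presumably used instead;
// without the cache, the raw pointer move is kept as before.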
! if (AOTCodeCache::is_on()) {
! counter_holder = new_register(T_METADATA);
+ __ metadata2reg(counters_adr, counter_holder);
+ } else {
+ counter_holder = new_pointer_register();
+ __ move(LIR_OprFact::intptrConst(counters_adr), counter_holder);
+ }
offset = in_bytes(backedge ? MethodCounters::backedge_counter_offset() :
MethodCounters::invocation_counter_offset());
} else if (level == CompLevel_full_profile) {
counter_holder = new_register(T_METADATA);
offset = in_bytes(backedge ? MethodData::backedge_counter_offset() :