 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "c1/c1_Compilation.hpp"
#include "c1/c1_Defs.hpp"
#include "c1/c1_FrameMap.hpp"
#include "c1/c1_Instruction.hpp"
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_LIRGenerator.hpp"
#include "c1/c1_ValueStack.hpp"
#include "ci/ciArrayKlass.hpp"
#include "ci/ciInstance.hpp"
#include "ci/ciObjArray.hpp"
#include "ci/ciUtilities.hpp"
#include "code/aotCodeCache.hpp"
#include "compiler/compilerDefinitions.inline.hpp"
#include "compiler/compilerOracle.hpp"
#include "gc/shared/barrierSet.hpp"
#include "gc/shared/c1/barrierSetC1.hpp"
#include "oops/klass.inline.hpp"
#include "oops/methodCounters.hpp"
#include "runtime/runtimeUpcalls.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/vm_version.hpp"
#include "utilities/bitMap.inline.hpp"
#include "utilities/macros.hpp"
#include "utilities/powerOfTwo.hpp"

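// Shorthand for emitting LIR through the current LIR_List. In ASSERT builds
// each emitted op also records the C++ source location that generated it.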
#ifdef ASSERT
#define __ gen()->lir(__FILE__, __LINE__)->
#else
#define __ gen()->lir()->
#endif

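// PATCHED_ADDR appears to serve as a recognizable placeholder displacement
// for addresses whose real offset is unknown at compile time and is filled
// in later by runtime patching.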
#ifndef PATCHED_ADDR
#define PATCHED_ADDR (max_jint)
#endif

void PhiResolverState::reset() {
  _virtual_operands.clear();
  _other_operands.clear();

// ...

  __ load_stack_address_monitor(monitor_no, lock);
  __ unlock_object(hdr, object, lock, scratch, slow_path);
}

#ifndef PRODUCT
void LIRGenerator::print_if_not_loaded(const NewInstance* new_instance) {
  if (PrintNotLoaded && !new_instance->klass()->is_loaded()) {
    tty->print_cr(" ###class not loaded at new bci %d", new_instance->printable_bci());
  } else if (PrintNotLoaded && (!CompilerConfig::is_c1_only_no_jvmci() && new_instance->is_unresolved())) {
    tty->print_cr(" ###class not resolved at new bci %d", new_instance->printable_bci());
  }
}
#endif

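// Emit allocation code for a new instance. If the klass is loaded and its
// layout allows inline allocation, emit the fast path with a stub as the
// slow-path fallback; otherwise branch unconditionally to the runtime stub.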
void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) {
  klass2reg_with_patching(klass_reg, klass, info, is_unresolved);
  // If klass is not loaded we do not know if the klass has finalizers:
  if (UseFastNewInstance && klass->is_loaded()
      && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {

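    // Keep the initialization check for precompiled code: even if the klass
    // is initialized now, it may not yet be initialized when the compiled
    // code eventually runs.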
    bool known_initialized = klass->is_initialized() && !compilation()->env()->is_precompile();
    StubId stub_id = known_initialized ? StubId::c1_fast_new_instance_id : StubId::c1_fast_new_instance_init_check_id;

    CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, stub_id);

    assert(klass->is_loaded(), "must be loaded");
    // allocate space for instance
    assert(klass->size_helper() > 0, "illegal instance size");
    const int instance_size = align_object_size(klass->size_helper());
    __ allocate_object(dst, scratch1, scratch2, scratch3, scratch4,
                       oopDesc::header_size(), instance_size, klass_reg, !known_initialized, slow_path);
  } else {
    CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, StubId::c1_new_instance_id);
    __ branch(lir_cond_always, slow_path);
    __ branch_destination(slow_path->continuation());
  }
}


static bool is_constant_zero(Instruction* inst) {
  IntConstant* c = inst->type()->as_IntConstant();
  if (c) {
    return (c->value() == 0);
  }
  return false;
}


static bool positive_constant(Instruction* inst) {
  IntConstant* c = inst->type()->as_IntConstant();
  if (c) {

// ...

#endif // __SOFTFP__
    local->set_operand(dest);
#ifdef ASSERT
    _instruction_for_operand.at_put_grow(dest->vreg_number(), local, nullptr);
#endif
    java_index += type2size[t];
  }

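  // Notify the dtrace method-entry probe via a runtime call, passing the
  // current thread and the Method* being entered.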
  if (compilation()->env()->dtrace_method_probes()) {
    BasicTypeList signature;
    signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT)); // thread
    signature.append(T_METADATA); // Method*
    LIR_OprList* args = new LIR_OprList();
    args->append(getThreadPointer());
    LIR_Opr meth = new_register(T_METADATA);
    __ metadata2reg(method()->constant_encoding(), meth);
    args->append(meth);
    call_runtime(&signature, args, CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), voidType, nullptr);
  }

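  // Emit a runtime call for each registered method-entry upcall that applies
  // to this method; each upcall receives the current thread as its argument.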
  MethodDetails method_details(method());
  RuntimeUpcallInfo* upcall = RuntimeUpcalls::get_first_upcall(RuntimeUpcallType::onMethodEntry, method_details);
  while (upcall != nullptr) {
    BasicTypeList signature;
    signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT)); // thread
    LIR_OprList* args = new LIR_OprList();
    args->append(getThreadPointer());
    call_runtime(&signature, args, upcall->upcall_address(), voidType, nullptr);
    upcall = RuntimeUpcalls::get_next_upcall(RuntimeUpcallType::onMethodEntry, method_details, upcall);
  }

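  // Synchronized methods acquire their monitor on entry: the holder's class
  // mirror for static methods, the receiver otherwise.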
  if (method()->is_synchronized()) {
    LIR_Opr obj;
    if (method()->is_static()) {
      obj = new_register(T_OBJECT);
      __ oop2reg(method()->holder()->java_mirror()->constant_encoding(), obj);
    } else {
      Local* receiver = x->state()->local_at(0)->as_Local();
      assert(receiver != nullptr, "must already exist");
      obj = receiver->operand();
    }
    assert(obj->is_valid(), "must be valid");

    if (method()->is_synchronized()) {
      LIR_Opr lock = syncLockOpr();
      __ load_stack_address_monitor(0, lock);

      CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), nullptr, x->check_flag(Instruction::DeoptimizeOnException));
      CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);

      // The receiver is guaranteed non-null, so no CodeEmitInfo is needed.

// ...

  int freq_log = 0;
  int level = compilation()->env()->comp_level();
  if (level == CompLevel_limited_profile) {
    freq_log = (backedge ? Tier2BackedgeNotifyFreqLog : Tier2InvokeNotifyFreqLog);
  } else if (level == CompLevel_full_profile) {
    freq_log = (backedge ? Tier3BackedgeNotifyFreqLog : Tier3InvokeNotifyFreqLog);
  } else {
    ShouldNotReachHere();
  }
  // Increment the appropriate invocation/backedge counter and notify the runtime.
  double scale;
  if (_method->has_option_value(CompileCommandEnum::CompileThresholdScaling, scale)) {
    freq_log = CompilerConfig::scaled_freq_log(freq_log, scale);
  }
  increment_event_counter_impl(info, info->scope()->method(), step, right_n_bits(freq_log), bci, backedge, true);
}

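// Increment the invocation or backedge counter: held in the MethodCounters
// for limited-profile (tier 2) code, or in the MethodData for full-profile
// (tier 3) code, and optionally notify the runtime when it overflows.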
void LIRGenerator::increment_event_counter_impl(CodeEmitInfo* info,
                                                ciMethod* method, LIR_Opr step, int frequency,
                                                int bci, bool backedge, bool notify) {
  if (PreloadOnly) {
    // Nothing to do if we only use preload code.
    return;
  }
  assert(frequency == 0 || is_power_of_2(frequency + 1), "Frequency must be 2^n - 1 or 0");
  int level = _compilation->env()->comp_level();
  assert(level > CompLevel_simple, "Shouldn't be here");

  int offset = -1;
  LIR_Opr counter_holder;
  if (level == CompLevel_limited_profile) {
    MethodCounters* counters_adr = method->ensure_method_counters();
    if (counters_adr == nullptr) {
      bailout("method counters allocation failed");
      return;
    }
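    // When the AOT code cache is in use, avoid embedding the raw
    // MethodCounters* as an address constant; load it as metadata so the
    // reference stays relocatable when the cached code is reloaded.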
    if (AOTCodeCache::is_on()) {
      counter_holder = new_register(T_METADATA);
      __ metadata2reg(counters_adr, counter_holder);
    } else {
      counter_holder = new_pointer_register();
      __ move(LIR_OprFact::intptrConst(counters_adr), counter_holder);
    }
    offset = in_bytes(backedge ? MethodCounters::backedge_counter_offset() :
                                 MethodCounters::invocation_counter_offset());
  } else if (level == CompLevel_full_profile) {
    counter_holder = new_register(T_METADATA);
    offset = in_bytes(backedge ? MethodData::backedge_counter_offset() :
                                 MethodData::invocation_counter_offset());
    ciMethodData* md = method->method_data_or_null();
    assert(md != nullptr, "Sanity");
    __ metadata2reg(md->constant_encoding(), counter_holder);
  } else {
    ShouldNotReachHere();
  }
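  // Update the selected counter: load it, add the step, and store it back.
  // Backedge counters only notify the runtime when on-stack replacement is
  // enabled.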
  LIR_Address* counter = new LIR_Address(counter_holder, offset, T_INT);
  LIR_Opr result = new_register(T_INT);
  __ load(counter, result);
  __ add(result, step, result);
  __ store(result, counter);
  if (notify && (!backedge || UseOnStackReplacement)) {
    LIR_Opr meth = LIR_OprFact::metadataConst(method->constant_encoding());
    // The bci for info can point to the cmp of an if; we want the if's bci.

// ...