 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
24
25 #include "c1/c1_Compilation.hpp"
26 #include "c1/c1_Defs.hpp"
27 #include "c1/c1_FrameMap.hpp"
28 #include "c1/c1_Instruction.hpp"
29 #include "c1/c1_LIRAssembler.hpp"
30 #include "c1/c1_LIRGenerator.hpp"
31 #include "c1/c1_ValueStack.hpp"
32 #include "ci/ciArrayKlass.hpp"
33 #include "ci/ciInstance.hpp"
34 #include "ci/ciObjArray.hpp"
35 #include "ci/ciUtilities.hpp"
36 #include "code/aotCodeCache.hpp"
37 #include "compiler/compilerDefinitions.inline.hpp"
38 #include "compiler/compilerOracle.hpp"
39 #include "gc/shared/barrierSet.hpp"
40 #include "gc/shared/c1/barrierSetC1.hpp"
41 #include "oops/klass.inline.hpp"
42 #include "oops/methodCounters.hpp"
43 #include "runtime/runtimeUpcalls.hpp"
44 #include "runtime/sharedRuntime.hpp"
45 #include "runtime/stubRoutines.hpp"
46 #include "runtime/vm_version.hpp"
47 #include "utilities/bitMap.inline.hpp"
48 #include "utilities/macros.hpp"
49 #include "utilities/powerOfTwo.hpp"
50
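// The `__` shorthand below appends LIR instructions to the generator's
// current LIR list; in debug (ASSERT) builds it also records the C++ source
// file and line that emitted each instruction, which aids LIR dumps.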
#ifdef ASSERT
#define __ gen()->lir(__FILE__, __LINE__)->
#else
#define __ gen()->lir()->
#endif

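// PATCHED_ADDR is a deliberately out-of-range placeholder (max_jint) used
// where the real address or offset is rewritten later by the patching code,
// once the referenced class or field has been resolved.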
#ifndef PATCHED_ADDR
#define PATCHED_ADDR (max_jint)
#endif

void PhiResolverState::reset() {
  _virtual_operands.clear();
  _other_operands.clear();

// ...

  __ load_stack_address_monitor(monitor_no, lock);
  __ unlock_object(hdr, object, lock, scratch, slow_path);
}

#ifndef PRODUCT
void LIRGenerator::print_if_not_loaded(const NewInstance* new_instance) {
  if (PrintNotLoaded && !new_instance->klass()->is_loaded()) {
    tty->print_cr(" ###class not loaded at new bci %d", new_instance->printable_bci());
  } else if (PrintNotLoaded && (!CompilerConfig::is_c1_only_no_jvmci() && new_instance->is_unresolved())) {
    tty->print_cr(" ###class not resolved at new bci %d", new_instance->printable_bci());
  }
}
#endif

void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) {
  klass2reg_with_patching(klass_reg, klass, info, is_unresolved);
  // If the klass is not loaded, we do not know whether it has finalizers:
  if (UseFastNewInstance && klass->is_loaded()
      && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {

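    // When precompiling, the class may not yet be initialized at the time the
    // generated code eventually runs, even if it is initialized in this VM
    // now, so the initialization check may only be skipped outside a
    // precompile compilation.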
    bool known_initialized = klass->is_initialized() && !compilation()->env()->is_precompile();
    C1StubId stub_id = known_initialized ? C1StubId::fast_new_instance_id : C1StubId::fast_new_instance_init_check_id;

    CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, stub_id);

    assert(klass->is_loaded(), "must be loaded");
    // allocate space for instance
    assert(klass->size_helper() > 0, "illegal instance size");
    const int instance_size = align_object_size(klass->size_helper());
    __ allocate_object(dst, scratch1, scratch2, scratch3, scratch4,
                       oopDesc::header_size(), instance_size, klass_reg, !known_initialized, slow_path);
  } else {
    CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, C1StubId::new_instance_id);
    __ branch(lir_cond_always, slow_path);
    __ branch_destination(slow_path->continuation());
  }
}


static bool is_constant_zero(Instruction* inst) {
  IntConstant* c = inst->type()->as_IntConstant();
  if (c) {
    return (c->value() == 0);
  }
  return false;
}


static bool positive_constant(Instruction* inst) {
  IntConstant* c = inst->type()->as_IntConstant();
  if (c) {

// ...

#endif // __SOFTFP__
    local->set_operand(dest);
#ifdef ASSERT
    _instruction_for_operand.at_put_grow(dest->vreg_number(), local, nullptr);
#endif
    java_index += type2size[t];
  }

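  // With dtrace method probes enabled, call into the runtime on entry,
  // passing the current thread and the Method* being entered.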
  if (compilation()->env()->dtrace_method_probes()) {
    BasicTypeList signature;
    signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT)); // thread
    signature.append(T_METADATA); // Method*
    LIR_OprList* args = new LIR_OprList();
    args->append(getThreadPointer());
    LIR_Opr meth = new_register(T_METADATA);
    __ metadata2reg(method()->constant_encoding(), meth);
    args->append(meth);
    call_runtime(&signature, args, CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), voidType, nullptr);
  }

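  // Emit a runtime call for every registered method-entry upcall; each
  // upcall receives just the current thread as its argument.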
  MethodDetails method_details(method());
  RuntimeUpcallInfo* upcall = RuntimeUpcalls::get_first_upcall(RuntimeUpcallType::onMethodEntry, method_details);
  while (upcall != nullptr) {
    BasicTypeList signature;
    signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT)); // thread
    LIR_OprList* args = new LIR_OprList();
    args->append(getThreadPointer());
    call_runtime(&signature, args, upcall->upcall_address(), voidType, nullptr);
    upcall = RuntimeUpcalls::get_next_upcall(RuntimeUpcallType::onMethodEntry, method_details, upcall);
  }

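  // For synchronized methods, acquire the monitor in stack slot 0 on entry:
  // static methods lock the holder's class mirror, instance methods lock
  // the receiver.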
  if (method()->is_synchronized()) {
    LIR_Opr obj;
    if (method()->is_static()) {
      obj = new_register(T_OBJECT);
      __ oop2reg(method()->holder()->java_mirror()->constant_encoding(), obj);
    } else {
      Local* receiver = x->state()->local_at(0)->as_Local();
      assert(receiver != nullptr, "must already exist");
      obj = receiver->operand();
    }
    assert(obj->is_valid(), "must be valid");

    if (GenerateSynchronizationCode) {
      LIR_Opr lock = syncLockOpr();
      __ load_stack_address_monitor(0, lock);

      CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), nullptr, x->check_flag(Instruction::DeoptimizeOnException));
      CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);

      // the receiver is guaranteed non-null, so no CodeEmitInfo is needed

// ...

  int freq_log = 0;
  int level = compilation()->env()->comp_level();
  if (level == CompLevel_limited_profile) {
    freq_log = (backedge ? Tier2BackedgeNotifyFreqLog : Tier2InvokeNotifyFreqLog);
  } else if (level == CompLevel_full_profile) {
    freq_log = (backedge ? Tier3BackedgeNotifyFreqLog : Tier3InvokeNotifyFreqLog);
  } else {
    ShouldNotReachHere();
  }
  // Increment the appropriate invocation/backedge counter and notify the runtime.
  double scale;
  if (_method->has_option_value(CompileCommandEnum::CompileThresholdScaling, scale)) {
    freq_log = CompilerConfig::scaled_freq_log(freq_log, scale);
  }
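  // right_n_bits(freq_log) yields the mask 2^freq_log - 1, which is used to
  // decide when to notify the runtime (roughly every 2^freq_log events).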
  increment_event_counter_impl(info, info->scope()->method(), step, right_n_bits(freq_log), bci, backedge, true);
}

void LIRGenerator::increment_event_counter_impl(CodeEmitInfo* info,
                                                ciMethod *method, LIR_Opr step, int frequency,
                                                int bci, bool backedge, bool notify) {
  if (PreloadOnly) {
    // Nothing to do if we only use preload code.
    return;
  }
  assert(frequency == 0 || is_power_of_2(frequency + 1), "frequency must be 2^n - 1 or 0");
  int level = _compilation->env()->comp_level();
  assert(level > CompLevel_simple, "Shouldn't be here");

  int offset = -1;
  LIR_Opr counter_holder;
  if (level == CompLevel_limited_profile) {
    MethodCounters* counters_adr = method->ensure_method_counters();
    if (counters_adr == nullptr) {
      bailout("method counters allocation failed");
      return;
    }
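    // With the AOT code cache the MethodCounters pointer cannot be baked in
    // as a raw immediate: the cached code must stay relocatable across runs,
    // so load it as a metadata constant that the cache can relocate.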
    if (AOTCodeCache::is_on()) {
      counter_holder = new_register(T_METADATA);
      __ metadata2reg(counters_adr, counter_holder);
    } else {
      counter_holder = new_pointer_register();
      __ move(LIR_OprFact::intptrConst(counters_adr), counter_holder);
    }
    offset = in_bytes(backedge ? MethodCounters::backedge_counter_offset() :
                                 MethodCounters::invocation_counter_offset());
  } else if (level == CompLevel_full_profile) {
    counter_holder = new_register(T_METADATA);
    offset = in_bytes(backedge ? MethodData::backedge_counter_offset() :
                                 MethodData::invocation_counter_offset());
    ciMethodData* md = method->method_data_or_null();
    assert(md != nullptr, "Sanity");
    __ metadata2reg(md->constant_encoding(), counter_holder);
  } else {
    ShouldNotReachHere();
  }
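  // Bump the counter with a plain load/add/store; the update is not atomic,
  // so concurrent threads may occasionally lose an increment, which is
  // acceptable for profiling counters.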
  LIR_Address* counter = new LIR_Address(counter_holder, offset, T_INT);
  LIR_Opr result = new_register(T_INT);
  __ load(counter, result);
  __ add(result, step, result);
  __ store(result, counter);
  if (notify && (!backedge || UseOnStackReplacement)) {
    LIR_Opr meth = LIR_OprFact::metadataConst(method->constant_encoding());
    // The bci in info can point at the cmp of an if; we want the if's own bci