src/hotspot/share/c1/c1_LIRGenerator.cpp (original)

  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "c1/c1_Compilation.hpp"
  26 #include "c1/c1_Defs.hpp"
  27 #include "c1/c1_FrameMap.hpp"
  28 #include "c1/c1_Instruction.hpp"
  29 #include "c1/c1_LIRAssembler.hpp"
  30 #include "c1/c1_LIRGenerator.hpp"
  31 #include "c1/c1_ValueStack.hpp"
  32 #include "ci/ciArrayKlass.hpp"
  33 #include "ci/ciInstance.hpp"
  34 #include "ci/ciObjArray.hpp"
  35 #include "ci/ciUtilities.hpp"

  36 #include "compiler/compilerDefinitions.inline.hpp"
  37 #include "compiler/compilerOracle.hpp"
  38 #include "gc/shared/barrierSet.hpp"
  39 #include "gc/shared/c1/barrierSetC1.hpp"
  40 #include "oops/klass.inline.hpp"
  41 #include "oops/methodCounters.hpp"
  42 #include "runtime/sharedRuntime.hpp"
  43 #include "runtime/stubRoutines.hpp"
  44 #include "runtime/vm_version.hpp"
  45 #include "utilities/bitMap.inline.hpp"
  46 #include "utilities/macros.hpp"
  47 #include "utilities/powerOfTwo.hpp"
  48 
  49 #ifdef ASSERT
  50 #define __ gen()->lir(__FILE__, __LINE__)->
  51 #else
  52 #define __ gen()->lir()->
  53 #endif
  54 
  55 #ifndef PATCHED_ADDR
  56 #define PATCHED_ADDR  (max_jint)
  57 #endif
  58 
  59 void PhiResolverState::reset() {
  60   _virtual_operands.clear();
  61   _other_operands.clear();

 639   __ load_stack_address_monitor(monitor_no, lock);
 640   __ unlock_object(hdr, object, lock, scratch, slow_path);
 641 }
 642 
 643 #ifndef PRODUCT
 644 void LIRGenerator::print_if_not_loaded(const NewInstance* new_instance) {
 645   if (PrintNotLoaded && !new_instance->klass()->is_loaded()) {
 646     tty->print_cr("   ###class not loaded at new bci %d", new_instance->printable_bci());
 647   } else if (PrintNotLoaded && (!CompilerConfig::is_c1_only_no_jvmci() && new_instance->is_unresolved())) {
 648     tty->print_cr("   ###class not resolved at new bci %d", new_instance->printable_bci());
 649   }
 650 }
 651 #endif
 652 
 653 void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) {
 654   klass2reg_with_patching(klass_reg, klass, info, is_unresolved);
 655   // If the klass is not loaded, we do not know whether it has finalizers:
 656   if (UseFastNewInstance && klass->is_loaded()
 657       && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {
 658 
 659     StubId stub_id = klass->is_initialized() ? StubId::c1_fast_new_instance_id : StubId::c1_fast_new_instance_init_check_id;
 660 
 661     CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, stub_id);
 662 
 663     assert(klass->is_loaded(), "must be loaded");
 664     // allocate space for instance
 665     assert(klass->size_helper() > 0, "illegal instance size");
 666     const int instance_size = align_object_size(klass->size_helper());
 667     __ allocate_object(dst, scratch1, scratch2, scratch3, scratch4,
 668                        oopDesc::header_size(), instance_size, klass_reg, !klass->is_initialized(), slow_path);
 669   } else {
 670     CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, StubId::c1_new_instance_id);
 671     __ branch(lir_cond_always, slow_path);
 672     __ branch_destination(slow_path->continuation());
 673   }
 674 }
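
For reference, the fast path above sizes the allocation from the klass layout: size_helper() is the instance size in heap words, and align_object_size() rounds it up to the object-alignment granularity. A minimal editorial sketch of that rounding, assuming the usual power-of-two MinObjAlignment (in words):

   // Editorial sketch, not part of the patch: round a size in heap words up
   // to the object-alignment granularity, as align_object_size() does.
   // With MinObjAlignment == 1 (the common 64-bit default) this is a no-op;
   // with alignment 2, a 3-word instance would round up to 4 words.
   static size_t align_object_size_sketch(size_t word_size, size_t alignment) {
     return (word_size + alignment - 1) & ~(alignment - 1);
   }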
 675 
 676 
 677 static bool is_constant_zero(Instruction* inst) {
 678   IntConstant* c = inst->type()->as_IntConstant();
 679   if (c) {
 680     return (c->value() == 0);
 681   }
 682   return false;
 683 }
 684 
 685 
 686 static bool positive_constant(Instruction* inst) {
 687   IntConstant* c = inst->type()->as_IntConstant();
 688   if (c) {

2554 #endif // __SOFTFP__
2555     local->set_operand(dest);
2556 #ifdef ASSERT
2557     _instruction_for_operand.at_put_grow(dest->vreg_number(), local, nullptr);
2558 #endif
2559     java_index += type2size[t];
2560   }
2561 
2562   if (compilation()->env()->dtrace_method_probes()) {
2563     BasicTypeList signature;
2564     signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT));    // thread
2565     signature.append(T_METADATA); // Method*
2566     LIR_OprList* args = new LIR_OprList();
2567     args->append(getThreadPointer());
2568     LIR_Opr meth = new_register(T_METADATA);
2569     __ metadata2reg(method()->constant_encoding(), meth);
2570     args->append(meth);
2571     call_runtime(&signature, args, CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), voidType, nullptr);
2572   }
 2573 
2574   if (method()->is_synchronized()) {
2575     LIR_Opr obj;
2576     if (method()->is_static()) {
2577       obj = new_register(T_OBJECT);
2578       __ oop2reg(method()->holder()->java_mirror()->constant_encoding(), obj);
2579     } else {
2580       Local* receiver = x->state()->local_at(0)->as_Local();
2581       assert(receiver != nullptr, "must already exist");
2582       obj = receiver->operand();
2583     }
2584     assert(obj->is_valid(), "must be valid");
2585 
2586     if (method()->is_synchronized()) {
2587       LIR_Opr lock = syncLockOpr();
2588       __ load_stack_address_monitor(0, lock);
2589 
2590       CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), nullptr, x->check_flag(Instruction::DeoptimizeOnException));
2591       CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);
2592 
 2593       // receiver is guaranteed non-null, so no CodeEmitInfo is needed

3128   int freq_log = 0;
3129   int level = compilation()->env()->comp_level();
3130   if (level == CompLevel_limited_profile) {
3131     freq_log = (backedge ? Tier2BackedgeNotifyFreqLog : Tier2InvokeNotifyFreqLog);
3132   } else if (level == CompLevel_full_profile) {
3133     freq_log = (backedge ? Tier3BackedgeNotifyFreqLog : Tier3InvokeNotifyFreqLog);
3134   } else {
3135     ShouldNotReachHere();
3136   }
3137   // Increment the appropriate invocation/backedge counter and notify the runtime.
3138   double scale;
3139   if (_method->has_option_value(CompileCommandEnum::CompileThresholdScaling, scale)) {
3140     freq_log = CompilerConfig::scaled_freq_log(freq_log, scale);
3141   }
3142   increment_event_counter_impl(info, info->scope()->method(), step, right_n_bits(freq_log), bci, backedge, true);
3143 }
3144 
3145 void LIRGenerator::increment_event_counter_impl(CodeEmitInfo* info,
3146                                                 ciMethod *method, LIR_Opr step, int frequency,
 3147                                                 int bci, bool backedge, bool notify) {
 3148   assert(frequency == 0 || is_power_of_2(frequency + 1), "Frequency must be 2^x - 1 or 0");
3149   int level = _compilation->env()->comp_level();
3150   assert(level > CompLevel_simple, "Shouldn't be here");
3151 
3152   int offset = -1;
3153   LIR_Opr counter_holder;
3154   if (level == CompLevel_limited_profile) {
3155     MethodCounters* counters_adr = method->ensure_method_counters();
3156     if (counters_adr == nullptr) {
3157       bailout("method counters allocation failed");
3158       return;
3159     }
3160     counter_holder = new_pointer_register();
 3161     __ move(LIR_OprFact::intptrConst(counters_adr), counter_holder);
 3162     offset = in_bytes(backedge ? MethodCounters::backedge_counter_offset() :
3163                                  MethodCounters::invocation_counter_offset());
3164   } else if (level == CompLevel_full_profile) {
3165     counter_holder = new_register(T_METADATA);
3166     offset = in_bytes(backedge ? MethodData::backedge_counter_offset() :
3167                                  MethodData::invocation_counter_offset());
3168     ciMethodData* md = method->method_data_or_null();
3169     assert(md != nullptr, "Sanity");
3170     __ metadata2reg(md->constant_encoding(), counter_holder);
3171   } else {
3172     ShouldNotReachHere();
3173   }
3174   LIR_Address* counter = new LIR_Address(counter_holder, offset, T_INT);
3175   LIR_Opr result = new_register(T_INT);
3176   __ load(counter, result);
3177   __ add(result, step, result);
3178   __ store(result, counter);
3179   if (notify && (!backedge || UseOnStackReplacement)) {
3180     LIR_Opr meth = LIR_OprFact::metadataConst(method->constant_encoding());
 3181     // The bci for info can point to the cmp of an if; we want the if's bci

src/hotspot/share/c1/c1_LIRGenerator.cpp (patched)

  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "c1/c1_Compilation.hpp"
  26 #include "c1/c1_Defs.hpp"
  27 #include "c1/c1_FrameMap.hpp"
  28 #include "c1/c1_Instruction.hpp"
  29 #include "c1/c1_LIRAssembler.hpp"
  30 #include "c1/c1_LIRGenerator.hpp"
  31 #include "c1/c1_ValueStack.hpp"
  32 #include "ci/ciArrayKlass.hpp"
  33 #include "ci/ciInstance.hpp"
  34 #include "ci/ciObjArray.hpp"
  35 #include "ci/ciUtilities.hpp"
  36 #include "code/aotCodeCache.hpp"
  37 #include "compiler/compilerDefinitions.inline.hpp"
  38 #include "compiler/compilerOracle.hpp"
  39 #include "gc/shared/barrierSet.hpp"
  40 #include "gc/shared/c1/barrierSetC1.hpp"
  41 #include "oops/klass.inline.hpp"
  42 #include "oops/methodCounters.hpp"
  43 #include "runtime/runtimeUpcalls.hpp"
  44 #include "runtime/sharedRuntime.hpp"
  45 #include "runtime/stubRoutines.hpp"
  46 #include "runtime/vm_version.hpp"
  47 #include "utilities/bitMap.inline.hpp"
  48 #include "utilities/macros.hpp"
  49 #include "utilities/powerOfTwo.hpp"
  50 
  51 #ifdef ASSERT
  52 #define __ gen()->lir(__FILE__, __LINE__)->
  53 #else
  54 #define __ gen()->lir()->
  55 #endif
  56 
  57 #ifndef PATCHED_ADDR
  58 #define PATCHED_ADDR  (max_jint)
  59 #endif
  60 
  61 void PhiResolverState::reset() {
  62   _virtual_operands.clear();
  63   _other_operands.clear();

 641   __ load_stack_address_monitor(monitor_no, lock);
 642   __ unlock_object(hdr, object, lock, scratch, slow_path);
 643 }
 644 
 645 #ifndef PRODUCT
 646 void LIRGenerator::print_if_not_loaded(const NewInstance* new_instance) {
 647   if (PrintNotLoaded && !new_instance->klass()->is_loaded()) {
 648     tty->print_cr("   ###class not loaded at new bci %d", new_instance->printable_bci());
 649   } else if (PrintNotLoaded && (!CompilerConfig::is_c1_only_no_jvmci() && new_instance->is_unresolved())) {
 650     tty->print_cr("   ###class not resolved at new bci %d", new_instance->printable_bci());
 651   }
 652 }
 653 #endif
 654 
 655 void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) {
 656   klass2reg_with_patching(klass_reg, klass, info, is_unresolved);
 657   // If the klass is not loaded, we do not know whether it has finalizers:
 658   if (UseFastNewInstance && klass->is_loaded()
 659       && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {
 660 
 661     bool known_initialized = klass->is_initialized() && !compilation()->env()->is_precompile();
 662     StubId stub_id = known_initialized ? StubId::c1_fast_new_instance_id : StubId::c1_fast_new_instance_init_check_id;
 663 
 664     CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, stub_id);
 665 
 666     assert(klass->is_loaded(), "must be loaded");
 667     // allocate space for instance
 668     assert(klass->size_helper() > 0, "illegal instance size");
 669     const int instance_size = align_object_size(klass->size_helper());
 670     __ allocate_object(dst, scratch1, scratch2, scratch3, scratch4,
 671                        oopDesc::header_size(), instance_size, klass_reg, !known_initialized, slow_path);
 672   } else {
 673     CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, StubId::c1_new_instance_id);
 674     __ branch(lir_cond_always, slow_path);
 675     __ branch_destination(slow_path->continuation());
 676   }
 677 }
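
The known_initialized flag is the substance of this hunk: a precompiled method can execute before the class's static initializer has run, so under precompilation the fast path must keep the runtime init check even for a class that is initialized at compile time. Condensed, the selection reads (names taken from the hunk above):

   // Editorial restatement of the stub choice above: skip the init check only
   // when the klass is initialized AND this is not a precompile (AOT) run,
   // because precompiled code may execute before static initialization.
   bool known_initialized = klass->is_initialized()
                            && !compilation()->env()->is_precompile();
   StubId stub_id = known_initialized
       ? StubId::c1_fast_new_instance_id              // no init check emitted
       : StubId::c1_fast_new_instance_init_check_id;  // init check at run time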
 678 
 679 
 680 static bool is_constant_zero(Instruction* inst) {
 681   IntConstant* c = inst->type()->as_IntConstant();
 682   if (c) {
 683     return (c->value() == 0);
 684   }
 685   return false;
 686 }
 687 
 688 
 689 static bool positive_constant(Instruction* inst) {
 690   IntConstant* c = inst->type()->as_IntConstant();
 691   if (c) {

2557 #endif // __SOFTFP__
2558     local->set_operand(dest);
2559 #ifdef ASSERT
2560     _instruction_for_operand.at_put_grow(dest->vreg_number(), local, nullptr);
2561 #endif
2562     java_index += type2size[t];
2563   }
2564 
2565   if (compilation()->env()->dtrace_method_probes()) {
2566     BasicTypeList signature;
2567     signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT));    // thread
2568     signature.append(T_METADATA); // Method*
2569     LIR_OprList* args = new LIR_OprList();
2570     args->append(getThreadPointer());
2571     LIR_Opr meth = new_register(T_METADATA);
2572     __ metadata2reg(method()->constant_encoding(), meth);
2573     args->append(meth);
2574     call_runtime(&signature, args, CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), voidType, nullptr);
2575   }
2576 
2577   MethodDetails method_details(method());
2578   RuntimeUpcallInfo* upcall = RuntimeUpcalls::get_first_upcall(RuntimeUpcallType::onMethodEntry, method_details);
2579   while (upcall != nullptr) {
2580     BasicTypeList signature;
2581     signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT));    // thread
2582     LIR_OprList* args = new LIR_OprList();
2583     args->append(getThreadPointer());
2584     call_runtime(&signature, args, upcall->upcall_address(), voidType, nullptr);
2585     upcall = RuntimeUpcalls::get_next_upcall(RuntimeUpcallType::onMethodEntry, method_details, upcall);
2586   }
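       // Editorial note, not part of the patch: the loop above lowers each
       // registered onMethodEntry upcall to a plain runtime call that takes
       // only the current thread, so the method prologue gains one
       // call_runtime() per upcall, walked via get_first_upcall() /
       // get_next_upcall() for this method's MethodDetails.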
2587 
2588   if (method()->is_synchronized()) {
2589     LIR_Opr obj;
2590     if (method()->is_static()) {
2591       obj = new_register(T_OBJECT);
2592       __ oop2reg(method()->holder()->java_mirror()->constant_encoding(), obj);
2593     } else {
2594       Local* receiver = x->state()->local_at(0)->as_Local();
2595       assert(receiver != nullptr, "must already exist");
2596       obj = receiver->operand();
2597     }
2598     assert(obj->is_valid(), "must be valid");
2599 
2600     if (method()->is_synchronized()) {
2601       LIR_Opr lock = syncLockOpr();
2602       __ load_stack_address_monitor(0, lock);
2603 
2604       CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), nullptr, x->check_flag(Instruction::DeoptimizeOnException));
2605       CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);
2606 
 2607       // receiver is guaranteed non-null, so no CodeEmitInfo is needed

3142   int freq_log = 0;
3143   int level = compilation()->env()->comp_level();
3144   if (level == CompLevel_limited_profile) {
3145     freq_log = (backedge ? Tier2BackedgeNotifyFreqLog : Tier2InvokeNotifyFreqLog);
3146   } else if (level == CompLevel_full_profile) {
3147     freq_log = (backedge ? Tier3BackedgeNotifyFreqLog : Tier3InvokeNotifyFreqLog);
3148   } else {
3149     ShouldNotReachHere();
3150   }
3151   // Increment the appropriate invocation/backedge counter and notify the runtime.
3152   double scale;
3153   if (_method->has_option_value(CompileCommandEnum::CompileThresholdScaling, scale)) {
3154     freq_log = CompilerConfig::scaled_freq_log(freq_log, scale);
3155   }
3156   increment_event_counter_impl(info, info->scope()->method(), step, right_n_bits(freq_log), bci, backedge, true);
3157 }
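
The frequency handed to increment_event_counter_impl() is a low-bit mask: right_n_bits(freq_log) is 2^freq_log - 1, which is why the callee asserts that frequency + 1 is a power of two. A worked example of the masking, as an editorial sketch:

   // Editorial sketch: the notification frequency as a bit mask.
   // right_n_bits(10) == (1 << 10) - 1 == 1023, so with freq_log == 10 the
   // runtime is notified when (counter & 1023) == 0, roughly every 1024th
   // increment; freq_log == 0 yields mask 0, which notifies on every event.
   static int notify_mask(int freq_log) {
     return (1 << freq_log) - 1;
   }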
3158 
3159 void LIRGenerator::increment_event_counter_impl(CodeEmitInfo* info,
3160                                                 ciMethod *method, LIR_Opr step, int frequency,
3161                                                 int bci, bool backedge, bool notify) {
3162   if (PreloadOnly) {
3163     // Nothing to do if we only use preload code.
3164     return;
3165   }
 3166   assert(frequency == 0 || is_power_of_2(frequency + 1), "Frequency must be 2^x - 1 or 0");
3167   int level = _compilation->env()->comp_level();
3168   assert(level > CompLevel_simple, "Shouldn't be here");
3169 
3170   int offset = -1;
3171   LIR_Opr counter_holder;
3172   if (level == CompLevel_limited_profile) {
3173     MethodCounters* counters_adr = method->ensure_method_counters();
3174     if (counters_adr == nullptr) {
3175       bailout("method counters allocation failed");
3176       return;
3177     }
3178     if (AOTCodeCache::is_on()) {
3179       counter_holder = new_register(T_METADATA);
3180       __ metadata2reg(counters_adr, counter_holder);
3181     } else {
3182       counter_holder = new_pointer_register();
3183       __ move(LIR_OprFact::intptrConst(counters_adr), counter_holder);
3184     }
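       // Editorial note, not part of the patch: a plausible reading of the
       // branch above is that AOT-cached code must stay relocatable, so the
       // MethodCounters* is materialized as a metadata constant (which the
       // cache can patch on load) rather than baked in as a raw immediate;
       // without the cache, the raw pointer move is valid and cheaper.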
3185     offset = in_bytes(backedge ? MethodCounters::backedge_counter_offset() :
3186                                  MethodCounters::invocation_counter_offset());
3187   } else if (level == CompLevel_full_profile) {
3188     counter_holder = new_register(T_METADATA);
3189     offset = in_bytes(backedge ? MethodData::backedge_counter_offset() :
3190                                  MethodData::invocation_counter_offset());
3191     ciMethodData* md = method->method_data_or_null();
3192     assert(md != nullptr, "Sanity");
3193     __ metadata2reg(md->constant_encoding(), counter_holder);
3194   } else {
3195     ShouldNotReachHere();
3196   }
3197   LIR_Address* counter = new LIR_Address(counter_holder, offset, T_INT);
3198   LIR_Opr result = new_register(T_INT);
3199   __ load(counter, result);
3200   __ add(result, step, result);
3201   __ store(result, counter);
3202   if (notify && (!backedge || UseOnStackReplacement)) {
3203     LIR_Opr meth = LIR_OprFact::metadataConst(method->constant_encoding());
 3204     // The bci for info can point to the cmp of an if; we want the if's bci