src/hotspot/share/opto/compile.cpp

 618                   _options(options),
 619                   _method(target),
 620                   _entry_bci(osr_bci),
 621                   _ilt(nullptr),
 622                   _stub_function(nullptr),
 623                   _stub_name(nullptr),
 624                   _stub_entry_point(nullptr),
 625                   _max_node_limit(MaxNodeLimit),
 626                   _post_loop_opts_phase(false),
 627                   _allow_macro_nodes(true),
 628                   _inlining_progress(false),
 629                   _inlining_incrementally(false),
 630                   _do_cleanup(false),
 631                   _has_reserved_stack_access(target->has_reserved_stack_access()),
 632 #ifndef PRODUCT
 633                   _igv_idx(0),
 634                   _trace_opto_output(directive->TraceOptoOutputOption),
 635 #endif
 636                   _has_method_handle_invokes(false),
 637                   _clinit_barrier_on_entry(false),

 638                   _stress_seed(0),
 639                   _comp_arena(mtCompiler),
 640                   _barrier_set_state(BarrierSet::barrier_set()->barrier_set_c2()->create_barrier_state(comp_arena())),
 641                   _env(ci_env),
 642                   _directive(directive),
 643                   _log(ci_env->log()),
 644                   _first_failure_details(nullptr),
 645                   _intrinsics        (comp_arena(), 0, 0, nullptr),
 646                   _macro_nodes       (comp_arena(), 8, 0, nullptr),
 647                   _parse_predicates  (comp_arena(), 8, 0, nullptr),
 648                   _template_assertion_predicate_opaqs (comp_arena(), 8, 0, nullptr),
 649                   _expensive_nodes   (comp_arena(), 8, 0, nullptr),
 650                   _for_post_loop_igvn(comp_arena(), 8, 0, nullptr),
 651                   _unstable_if_traps (comp_arena(), 8, 0, nullptr),
 652                   _coarsened_locks   (comp_arena(), 8, 0, nullptr),
 653                   _congraph(nullptr),
 654                   NOT_PRODUCT(_igv_printer(nullptr) COMMA)
 655                   _unique(0),
 656                   _dead_node_count(0),
 657                   _dead_node_list(comp_arena()),

 899   : Phase(Compiler),
 900     _compile_id(0),
 901     _options(Options::for_runtime_stub()),
 902     _method(nullptr),
 903     _entry_bci(InvocationEntryBci),
 904     _stub_function(stub_function),
 905     _stub_name(stub_name),
 906     _stub_entry_point(nullptr),
 907     _max_node_limit(MaxNodeLimit),
 908     _post_loop_opts_phase(false),
 909     _allow_macro_nodes(true),
 910     _inlining_progress(false),
 911     _inlining_incrementally(false),
 912     _has_reserved_stack_access(false),
 913 #ifndef PRODUCT
 914     _igv_idx(0),
 915     _trace_opto_output(directive->TraceOptoOutputOption),
 916 #endif
 917     _has_method_handle_invokes(false),
 918     _clinit_barrier_on_entry(false),

 919     _stress_seed(0),
 920     _comp_arena(mtCompiler),
 921     _barrier_set_state(BarrierSet::barrier_set()->barrier_set_c2()->create_barrier_state(comp_arena())),
 922     _env(ci_env),
 923     _directive(directive),
 924     _log(ci_env->log()),
 925     _first_failure_details(nullptr),
 926     _for_post_loop_igvn(comp_arena(), 8, 0, nullptr),
 927     _congraph(nullptr),
 928     NOT_PRODUCT(_igv_printer(nullptr) COMMA)
 929     _unique(0),
 930     _dead_node_count(0),
 931     _dead_node_list(comp_arena()),
 932     _node_arena_one(mtCompiler),
 933     _node_arena_two(mtCompiler),
 934     _node_arena(&_node_arena_one),
 935     _mach_constant_base_node(nullptr),
 936     _Compile_types(mtCompiler),
 937     _initial_gvn(nullptr),
 938     _igvn_worklist(nullptr),

1077 
1078   set_rtm_state(NoRTM); // No RTM lock eliding by default
1079   _max_node_limit = _directive->MaxNodeLimitOption;
1080 
1081 #if INCLUDE_RTM_OPT
1082   if (UseRTMLocking && has_method() && (method()->method_data_or_null() != nullptr)) {
1083     int rtm_state = method()->method_data()->rtm_state();
1084     if (method_has_option(CompileCommandEnum::NoRTMLockEliding) || ((rtm_state & NoRTM) != 0)) {
1085       // Don't generate RTM lock eliding code.
1086       set_rtm_state(NoRTM);
1087     } else if (method_has_option(CompileCommandEnum::UseRTMLockEliding) || ((rtm_state & UseRTM) != 0) || !UseRTMDeopt) {
1088       // Generate RTM lock eliding code without abort ratio calculation code.
1089       set_rtm_state(UseRTM);
1090     } else if (UseRTMDeopt) {
1091       // Generate RTM lock eliding code and include abort ratio calculation
1092       // code if UseRTMDeopt is on.
1093       set_rtm_state(ProfileRTM);
1094     }
1095   }
1096 #endif
1097   if (VM_Version::supports_fast_class_init_checks() && has_method() && !is_osr_compilation() && method()->needs_clinit_barrier()) {

1098     set_clinit_barrier_on_entry(true);



1099   }
1100   if (debug_info()->recording_non_safepoints()) {
1101     set_node_note_array(new(comp_arena()) GrowableArray<Node_Notes*>
1102                         (comp_arena(), 8, 0, nullptr));
1103     set_default_node_notes(Node_Notes::make(this));
1104   }
1105 
1106   const int grow_ats = 16;
1107   _max_alias_types = grow_ats;
1108   _alias_types   = NEW_ARENA_ARRAY(comp_arena(), AliasType*, grow_ats);
1109   AliasType* ats = NEW_ARENA_ARRAY(comp_arena(), AliasType,  grow_ats);
1110   Copy::zero_to_bytes(ats, sizeof(AliasType)*grow_ats);
1111   {
1112     for (int i = 0; i < grow_ats; i++)  _alias_types[i] = &ats[i];
1113   }
1114   // Initialize the first few types.
1115   _alias_types[AliasIdxTop]->Init(AliasIdxTop, nullptr);
1116   _alias_types[AliasIdxBot]->Init(AliasIdxBot, TypePtr::BOTTOM);
1117   _alias_types[AliasIdxRaw]->Init(AliasIdxRaw, TypeRawPtr::BOTTOM);
1118   _num_alias_types = AliasIdxRaw+1;

4126       frc.get_float_count() > 32 &&
4127       frc.get_double_count() == 0 &&
4128       (10 * frc.get_call_count() < frc.get_float_count()) ) {
4129     set_24_bit_selection_and_mode(false, true);
4130   }
4131 #endif // IA32
4132 
4133   set_java_calls(frc.get_java_call_count());
4134   set_inner_loops(frc.get_inner_loop_count());
4135 
4136   // No infinite loops, no reason to bail out.
4137   return false;
4138 }
4139 
4140 //-----------------------------too_many_traps----------------------------------
4141 // Report if there are too many traps at the current method and bci.
4142 // Return true if there was a trap, and/or PerMethodTrapLimit is exceeded.
4143 bool Compile::too_many_traps(ciMethod* method,
4144                              int bci,
4145                              Deoptimization::DeoptReason reason) {



4146   ciMethodData* md = method->method_data();
4147   if (md->is_empty()) {
4148     // Assume the trap has not occurred, or that it occurred only
4149     // because of a transient condition during start-up in the interpreter.
4150     return false;
4151   }
4152   ciMethod* m = Deoptimization::reason_is_speculate(reason) ? this->method() : nullptr;
4153   if (md->has_trap_at(bci, m, reason) != 0) {
4154     // Assume PerBytecodeTrapLimit==0, for a more conservative heuristic.
4155     // Also, if there are multiple reasons, or if there is no per-BCI record,
4156     // assume the worst.
4157     if (log())
4158       log()->elem("observe trap='%s' count='%d'",
4159                   Deoptimization::trap_reason_name(reason),
4160                   md->trap_count(reason));
4161     return true;
4162   } else {
4163     // Ignore method/bci and see if there have been too many globally.
4164     return too_many_traps(reason, md);
4165   }

4240   _allowed_reasons = 0;
4241   if (is_method_compilation()) {
4242     for (int rs = (int)Deoptimization::Reason_none+1; rs < Compile::trapHistLength; rs++) {
4243       assert(rs < BitsPerInt, "recode bit map");
4244       if (!too_many_traps((Deoptimization::DeoptReason) rs)) {
4245         _allowed_reasons |= nth_bit(rs);
4246       }
4247     }
4248   }
4249 }
4250 
4251 bool Compile::needs_clinit_barrier(ciMethod* method, ciMethod* accessing_method) {
4252   return method->is_static() && needs_clinit_barrier(method->holder(), accessing_method);
4253 }
4254 
4255 bool Compile::needs_clinit_barrier(ciField* field, ciMethod* accessing_method) {
4256   return field->is_static() && needs_clinit_barrier(field->holder(), accessing_method);
4257 }
4258 
4259 bool Compile::needs_clinit_barrier(ciInstanceKlass* holder, ciMethod* accessing_method) {
4260   if (holder->is_initialized()) {
4261     return false;
4262   }
4263   if (holder->is_being_initialized()) {
4264     if (accessing_method->holder() == holder) {
4265       // Access inside a class. The barrier can be elided when access happens in <clinit>,
4266       // <init>, or a static method. In all those cases, an initialization barrier
4267       // on the holder klass has already been passed.
4268       if (accessing_method->is_static_initializer() ||
4269           accessing_method->is_object_initializer() ||
4270           accessing_method->is_static()) {
4271         return false;
4272       }
4273     } else if (accessing_method->holder()->is_subclass_of(holder)) {
4274       // Access from a subclass. The barrier can be elided only when access happens in <clinit>.
4275       // In case of <init> or a static method, a barrier on the subclass is not enough: the
4276       // child class can become fully initialized while its parent class is still being initialized.
4277       if (accessing_method->is_static_initializer()) {
4278         return false;
4279       }
4280     }
4281     ciMethod* root = method(); // the root method of compilation
4282     if (root != accessing_method) {
4283       return needs_clinit_barrier(holder, root); // check access in the context of compilation root

4626     // Reset _print_inlining_list, it only contains destructed objects.
4627     // It is on the arena, so it will be freed when the arena is reset.
4628     _print_inlining_list = nullptr;
4629     // _print_inlining_stream won't be used anymore, either.
4630     print_inlining_reset();
4631     size_t end = ss.size();
4632     _print_inlining_output = NEW_ARENA_ARRAY(comp_arena(), char, end+1);
4633     strncpy(_print_inlining_output, ss.freeze(), end+1);
4634     _print_inlining_output[end] = 0;
4635   }
4636 }
4637 
4638 void Compile::dump_print_inlining() {
4639   if (_print_inlining_output != nullptr) {
4640     tty->print_raw(_print_inlining_output);
4641   }
4642 }
4643 
4644 void Compile::log_late_inline(CallGenerator* cg) {
4645   if (log() != nullptr) {
4646     log()->head("late_inline method='%d'  inline_id='" JLONG_FORMAT "'", log()->identify(cg->method()),
4647                 cg->unique_id());
4648     JVMState* p = cg->call_node()->jvms();
4649     while (p != nullptr) {
4650       log()->elem("jvms bci='%d' method='%d'", p->bci(), log()->identify(p->method()));
4651       p = p->caller();
4652     }
4653     log()->tail("late_inline");
4654   }
4655 }
4656 
4657 void Compile::log_late_inline_failure(CallGenerator* cg, const char* msg) {
4658   log_late_inline(cg);
4659   if (log() != nullptr) {
4660     log()->inline_fail(msg);
4661   }
4662 }
4663 
4664 void Compile::log_inline_id(CallGenerator* cg) {
4665   if (log() != nullptr) {
4666     // The LogCompilation tool needs a unique way to identify late

 618                   _options(options),
 619                   _method(target),
 620                   _entry_bci(osr_bci),
 621                   _ilt(nullptr),
 622                   _stub_function(nullptr),
 623                   _stub_name(nullptr),
 624                   _stub_entry_point(nullptr),
 625                   _max_node_limit(MaxNodeLimit),
 626                   _post_loop_opts_phase(false),
 627                   _allow_macro_nodes(true),
 628                   _inlining_progress(false),
 629                   _inlining_incrementally(false),
 630                   _do_cleanup(false),
 631                   _has_reserved_stack_access(target->has_reserved_stack_access()),
 632 #ifndef PRODUCT
 633                   _igv_idx(0),
 634                   _trace_opto_output(directive->TraceOptoOutputOption),
 635 #endif
 636                   _has_method_handle_invokes(false),
 637                   _clinit_barrier_on_entry(false),
 638                   _has_clinit_barriers(false),
 639                   _stress_seed(0),
 640                   _comp_arena(mtCompiler),
 641                   _barrier_set_state(BarrierSet::barrier_set()->barrier_set_c2()->create_barrier_state(comp_arena())),
 642                   _env(ci_env),
 643                   _directive(directive),
 644                   _log(ci_env->log()),
 645                   _first_failure_details(nullptr),
 646                   _intrinsics        (comp_arena(), 0, 0, nullptr),
 647                   _macro_nodes       (comp_arena(), 8, 0, nullptr),
 648                   _parse_predicates  (comp_arena(), 8, 0, nullptr),
 649                   _template_assertion_predicate_opaqs (comp_arena(), 8, 0, nullptr),
 650                   _expensive_nodes   (comp_arena(), 8, 0, nullptr),
 651                   _for_post_loop_igvn(comp_arena(), 8, 0, nullptr),
 652                   _unstable_if_traps (comp_arena(), 8, 0, nullptr),
 653                   _coarsened_locks   (comp_arena(), 8, 0, nullptr),
 654                   _congraph(nullptr),
 655                   NOT_PRODUCT(_igv_printer(nullptr) COMMA)
 656                   _unique(0),
 657                   _dead_node_count(0),
 658                   _dead_node_list(comp_arena()),

 900   : Phase(Compiler),
 901     _compile_id(0),
 902     _options(Options::for_runtime_stub()),
 903     _method(nullptr),
 904     _entry_bci(InvocationEntryBci),
 905     _stub_function(stub_function),
 906     _stub_name(stub_name),
 907     _stub_entry_point(nullptr),
 908     _max_node_limit(MaxNodeLimit),
 909     _post_loop_opts_phase(false),
 910     _allow_macro_nodes(true),
 911     _inlining_progress(false),
 912     _inlining_incrementally(false),
 913     _has_reserved_stack_access(false),
 914 #ifndef PRODUCT
 915     _igv_idx(0),
 916     _trace_opto_output(directive->TraceOptoOutputOption),
 917 #endif
 918     _has_method_handle_invokes(false),
 919     _clinit_barrier_on_entry(false),
 920     _has_clinit_barriers(false),
 921     _stress_seed(0),
 922     _comp_arena(mtCompiler),
 923     _barrier_set_state(BarrierSet::barrier_set()->barrier_set_c2()->create_barrier_state(comp_arena())),
 924     _env(ci_env),
 925     _directive(directive),
 926     _log(ci_env->log()),
 927     _first_failure_details(nullptr),
 928     _for_post_loop_igvn(comp_arena(), 8, 0, nullptr),
 929     _congraph(nullptr),
 930     NOT_PRODUCT(_igv_printer(nullptr) COMMA)
 931     _unique(0),
 932     _dead_node_count(0),
 933     _dead_node_list(comp_arena()),
 934     _node_arena_one(mtCompiler),
 935     _node_arena_two(mtCompiler),
 936     _node_arena(&_node_arena_one),
 937     _mach_constant_base_node(nullptr),
 938     _Compile_types(mtCompiler),
 939     _initial_gvn(nullptr),
 940     _igvn_worklist(nullptr),
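
Both constructors now initialize the new _has_clinit_barriers flag to false (line 638 in the method constructor above, line 920 here), alongside the existing _clinit_barrier_on_entry. A minimal standalone sketch of how the two flags relate, assuming the conventional accessor pattern; the class below is illustrative and not the actual Compile declaration from compile.hpp:

// Illustrative only: models the two flags carried by Compile, not the real class.
class CompileFlagsSketch {
  bool _clinit_barrier_on_entry;  // a class-init check is emitted in the method prolog
  bool _has_clinit_barriers;      // the generated code contains clinit barriers at all
 public:
  CompileFlagsSketch() : _clinit_barrier_on_entry(false), _has_clinit_barriers(false) {}
  void set_clinit_barrier_on_entry(bool v) { _clinit_barrier_on_entry = v; }
  void set_has_clinit_barriers(bool v)     { _has_clinit_barriers = v; }
  bool clinit_barrier_on_entry() const     { return _clinit_barrier_on_entry; }
  bool has_clinit_barriers() const         { return _has_clinit_barriers; }  // assumed getter name
};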

1079 
1080   set_rtm_state(NoRTM); // No RTM lock eliding by default
1081   _max_node_limit = _directive->MaxNodeLimitOption;
1082 
1083 #if INCLUDE_RTM_OPT
1084   if (UseRTMLocking && has_method() && (method()->method_data_or_null() != nullptr)) {
1085     int rtm_state = method()->method_data()->rtm_state();
1086     if (method_has_option(CompileCommandEnum::NoRTMLockEliding) || ((rtm_state & NoRTM) != 0)) {
1087       // Don't generate RTM lock eliding code.
1088       set_rtm_state(NoRTM);
1089     } else if (method_has_option(CompileCommandEnum::UseRTMLockEliding) || ((rtm_state & UseRTM) != 0) || !UseRTMDeopt) {
1090       // Generate RTM lock eliding code without abort ratio calculation code.
1091       set_rtm_state(UseRTM);
1092     } else if (UseRTMDeopt) {
1093       // Generate RTM lock eliding code and include abort ratio calculation
1094       // code if UseRTMDeopt is on.
1095       set_rtm_state(ProfileRTM);
1096     }
1097   }
1098 #endif
1099   if (VM_Version::supports_fast_class_init_checks() && has_method() && !is_osr_compilation() &&
1100       (method()->needs_clinit_barrier() || (do_clinit_barriers() && method()->is_static()))) {
1101     set_clinit_barrier_on_entry(true);
1102     if (do_clinit_barriers()) {
1103       set_has_clinit_barriers(true); // Entry clinit barrier is in prolog code.
1104     }
1105   }
1106   if (debug_info()->recording_non_safepoints()) {
1107     set_node_note_array(new(comp_arena()) GrowableArray<Node_Notes*>
1108                         (comp_arena(), 8, 0, nullptr));
1109     set_default_node_notes(Node_Notes::make(this));
1110   }
1111 
1112   const int grow_ats = 16;
1113   _max_alias_types = grow_ats;
1114   _alias_types   = NEW_ARENA_ARRAY(comp_arena(), AliasType*, grow_ats);
1115   AliasType* ats = NEW_ARENA_ARRAY(comp_arena(), AliasType,  grow_ats);
1116   Copy::zero_to_bytes(ats, sizeof(AliasType)*grow_ats);
1117   {
1118     for (int i = 0; i < grow_ats; i++)  _alias_types[i] = &ats[i];
1119   }
1120   // Initialize the first few types.
1121   _alias_types[AliasIdxTop]->Init(AliasIdxTop, nullptr);
1122   _alias_types[AliasIdxBot]->Init(AliasIdxBot, TypePtr::BOTTOM);
1123   _alias_types[AliasIdxRaw]->Init(AliasIdxRaw, TypeRawPtr::BOTTOM);
1124   _num_alias_types = AliasIdxRaw+1;
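
In the updated Compile::Init (lines 1099-1105 above), the entry barrier is now requested either because the method itself needs a clinit barrier, or, when the do_clinit_barriers() mode is enabled, for any static method; in the latter case set_has_clinit_barriers(true) records that the prolog contains such a barrier. A minimal sketch of that decision with every input passed explicitly; the names are illustrative, not HotSpot API:

// Illustrative sketch of the entry-barrier decision added above. All inputs are
// passed explicitly; in the real code they come from VM_Version, the compilation
// directive and the ciMethod.
struct EntryBarrierDecision {
  bool clinit_barrier_on_entry;
  bool has_clinit_barriers;
};

EntryBarrierDecision decide_entry_barrier(bool supports_fast_class_init_checks,
                                          bool has_method,
                                          bool is_osr_compilation,
                                          bool method_needs_clinit_barrier,
                                          bool do_clinit_barriers,   // new mode in this change
                                          bool method_is_static) {
  EntryBarrierDecision d{false, false};
  if (supports_fast_class_init_checks && has_method && !is_osr_compilation &&
      (method_needs_clinit_barrier || (do_clinit_barriers && method_is_static))) {
    d.clinit_barrier_on_entry = true;
    if (do_clinit_barriers) {
      d.has_clinit_barriers = true;  // the entry clinit barrier lives in the prolog code
    }
  }
  return d;
}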

4132       frc.get_float_count() > 32 &&
4133       frc.get_double_count() == 0 &&
4134       (10 * frc.get_call_count() < frc.get_float_count()) ) {
4135     set_24_bit_selection_and_mode(false, true);
4136   }
4137 #endif // IA32
4138 
4139   set_java_calls(frc.get_java_call_count());
4140   set_inner_loops(frc.get_inner_loop_count());
4141 
4142   // No infinite loops, no reason to bail out.
4143   return false;
4144 }
4145 
4146 //-----------------------------too_many_traps----------------------------------
4147 // Report if there are too many traps at the current method and bci.
4148 // Return true if there was a trap, and/or PerMethodTrapLimit is exceeded.
4149 bool Compile::too_many_traps(ciMethod* method,
4150                              int bci,
4151                              Deoptimization::DeoptReason reason) {
4152   if (method->has_trap_at(bci)) {
4153     return true;
4154   }
4155   ciMethodData* md = method->method_data();
4156   if (md->is_empty()) {
4157     // Assume the trap has not occurred, or that it occurred only
4158     // because of a transient condition during start-up in the interpreter.
4159     return false;
4160   }
4161   ciMethod* m = Deoptimization::reason_is_speculate(reason) ? this->method() : nullptr;
4162   if (md->has_trap_at(bci, m, reason) != 0) {
4163     // Assume PerBytecodeTrapLimit==0, for a more conservative heuristic.
4164     // Also, if there are multiple reasons, or if there is no per-BCI record,
4165     // assume the worst.
4166     if (log())
4167       log()->elem("observe trap='%s' count='%d'",
4168                   Deoptimization::trap_reason_name(reason),
4169                   md->trap_count(reason));
4170     return true;
4171   } else {
4172     // Ignore method/bci and see if there have been too many globally.
4173     return too_many_traps(reason, md);
4174   }
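
In the updated too_many_traps (lines 4152-4154), a trap already recorded at this bci on the ciMethod short-circuits the check before any MethodData heuristics run; otherwise the existing per-bci and global limits apply as before. A simplified model of the layered checks, with std::function parameters standing in for the ciMethod/ciMethodData queries:

// Simplified model of the layered trap checks; not the HotSpot API.
#include <functional>

bool too_many_traps_sketch(const std::function<bool()>& method_has_trap_at_bci, // new first check
                           const std::function<bool()>& method_data_is_empty,
                           const std::function<bool()>& md_has_trap_at_bci_for_reason,
                           const std::function<bool()>& too_many_traps_globally) {
  if (method_has_trap_at_bci()) {
    return true;                       // a trap already happened here: be conservative
  }
  if (method_data_is_empty()) {
    return false;                      // no profile yet: assume no trap has occurred
  }
  if (md_has_trap_at_bci_for_reason()) {
    return true;                       // per-bci record for this reason: assume the worst
  }
  return too_many_traps_globally();    // fall back to the per-method/global limit
}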

4249   _allowed_reasons = 0;
4250   if (is_method_compilation()) {
4251     for (int rs = (int)Deoptimization::Reason_none+1; rs < Compile::trapHistLength; rs++) {
4252       assert(rs < BitsPerInt, "recode bit map");
4253       if (!too_many_traps((Deoptimization::DeoptReason) rs)) {
4254         _allowed_reasons |= nth_bit(rs);
4255       }
4256     }
4257   }
4258 }
4259 
4260 bool Compile::needs_clinit_barrier(ciMethod* method, ciMethod* accessing_method) {
4261   return method->is_static() && needs_clinit_barrier(method->holder(), accessing_method);
4262 }
4263 
4264 bool Compile::needs_clinit_barrier(ciField* field, ciMethod* accessing_method) {
4265   return field->is_static() && needs_clinit_barrier(field->holder(), accessing_method);
4266 }
4267 
4268 bool Compile::needs_clinit_barrier(ciInstanceKlass* holder, ciMethod* accessing_method) {
4269   if (holder->is_initialized() && !do_clinit_barriers()) {
4270     return false;
4271   }
4272   if (holder->is_being_initialized() || do_clinit_barriers()) {
4273     if (accessing_method->holder() == holder) {
4274       // Access inside a class. The barrier can be elided when access happens in <clinit>,
4275       // <init>, or a static method. In all those cases, an initialization barrier
4276       // on the holder klass has already been passed.
4277       if (accessing_method->is_static_initializer() ||
4278           accessing_method->is_object_initializer() ||
4279           accessing_method->is_static()) {
4280         return false;
4281       }
4282     } else if (accessing_method->holder()->is_subclass_of(holder)) {
4283       // Access from a subclass. The barrier can be elided only when access happens in <clinit>.
4284       // In case of <init> or a static method, a barrier on the subclass is not enough: the
4285       // child class can become fully initialized while its parent class is still being initialized.
4286       if (accessing_method->is_static_initializer()) {
4287         return false;
4288       }
4289     }
4290     ciMethod* root = method(); // the root method of compilation
4291     if (root != accessing_method) {
4292       return needs_clinit_barrier(holder, root); // check access in the context of compilation root
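
In the updated needs_clinit_barrier (lines 4268-4292), enabling do_clinit_barriers() makes the answer independent of whether the holder is already initialized at compile time, while the elision rules stay the same: access from the holder's own <clinit>, <init> or static methods, or from a subclass <clinit>, still needs no barrier, and for inlined accesses the question is re-asked for the compilation root. A compact sketch of the decision; the enum, struct and helper names are placeholders, not ci* API:

// Illustrative sketch of the barrier decision; not the actual ci* API.
enum class HolderState { Initialized, BeingInitialized, NotInitialized };

struct AccessCtx {
  bool in_holder;               // accessing method is declared in the holder itself
  bool in_subclass_of_holder;   // accessing method's class is a subclass of the holder
  bool is_static_initializer;   // <clinit>
  bool is_object_initializer;   // <init>
  bool is_static;
};

bool needs_clinit_barrier_sketch(HolderState state, const AccessCtx& ctx,
                                 bool do_clinit_barriers) {
  if (state == HolderState::Initialized && !do_clinit_barriers) {
    return false;  // nothing to wait for, unless the new mode forces barriers anyway
  }
  if (state == HolderState::BeingInitialized || do_clinit_barriers) {
    if (ctx.in_holder) {
      // <clinit>, <init> and static methods of the holder already passed its barrier.
      if (ctx.is_static_initializer || ctx.is_object_initializer || ctx.is_static) {
        return false;
      }
    } else if (ctx.in_subclass_of_holder) {
      // Only the subclass <clinit> guarantees the parent class is initialized.
      if (ctx.is_static_initializer) {
        return false;
      }
    }
  }
  return true;  // conservatively keep the barrier (the real code also re-asks the
                // question for the compilation root when the access was inlined)
}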

4635     // Reset _print_inlining_list, it only contains destructed objects.
4636     // It is on the arena, so it will be freed when the arena is reset.
4637     _print_inlining_list = nullptr;
4638     // _print_inlining_stream won't be used anymore, either.
4639     print_inlining_reset();
4640     size_t end = ss.size();
4641     _print_inlining_output = NEW_ARENA_ARRAY(comp_arena(), char, end+1);
4642     strncpy(_print_inlining_output, ss.freeze(), end+1);
4643     _print_inlining_output[end] = 0;
4644   }
4645 }
4646 
4647 void Compile::dump_print_inlining() {
4648   if (_print_inlining_output != nullptr) {
4649     tty->print_raw(_print_inlining_output);
4650   }
4651 }
4652 
4653 void Compile::log_late_inline(CallGenerator* cg) {
4654   if (log() != nullptr) {
4655     log()->head("late_inline method='%d' inline_id='" JLONG_FORMAT "'", log()->identify(cg->method()),
4656                 cg->unique_id());
4657     JVMState* p = cg->call_node()->jvms();
4658     while (p != nullptr) {
4659       log()->elem("jvms bci='%d' method='%d'", p->bci(), log()->identify(p->method()));
4660       p = p->caller();
4661     }
4662     log()->tail("late_inline");
4663   }
4664 }
4665 
4666 void Compile::log_late_inline_failure(CallGenerator* cg, const char* msg) {
4667   log_late_inline(cg);
4668   if (log() != nullptr) {
4669     log()->inline_fail(msg);
4670   }
4671 }
4672 
4673 void Compile::log_inline_id(CallGenerator* cg) {
4674   if (log() != nullptr) {
4675     // The LogCompilation tool needs a unique way to identify late