src/hotspot/share/opto/compile.cpp

 628       _method(target),
 629       _entry_bci(osr_bci),
 630       _ilt(nullptr),
 631       _stub_function(nullptr),
 632       _stub_name(nullptr),
 633       _stub_entry_point(nullptr),
 634       _max_node_limit(MaxNodeLimit),
 635       _post_loop_opts_phase(false),
 636       _merge_stores_phase(false),
 637       _allow_macro_nodes(true),
 638       _inlining_progress(false),
 639       _inlining_incrementally(false),
 640       _do_cleanup(false),
 641       _has_reserved_stack_access(target->has_reserved_stack_access()),
 642 #ifndef PRODUCT
 643       _igv_idx(0),
 644       _trace_opto_output(directive->TraceOptoOutputOption),
 645 #endif
 646       _has_method_handle_invokes(false),
 647       _clinit_barrier_on_entry(false),

 648       _stress_seed(0),
 649       _comp_arena(mtCompiler, Arena::Tag::tag_comp),
 650       _barrier_set_state(BarrierSet::barrier_set()->barrier_set_c2()->create_barrier_state(comp_arena())),
 651       _env(ci_env),
 652       _directive(directive),
 653       _log(ci_env->log()),
 654       _first_failure_details(nullptr),
 655       _intrinsics(comp_arena(), 0, 0, nullptr),
 656       _macro_nodes(comp_arena(), 8, 0, nullptr),
 657       _parse_predicates(comp_arena(), 8, 0, nullptr),
 658       _template_assertion_predicate_opaques(comp_arena(), 8, 0, nullptr),
 659       _expensive_nodes(comp_arena(), 8, 0, nullptr),
 660       _for_post_loop_igvn(comp_arena(), 8, 0, nullptr),
 661       _for_merge_stores_igvn(comp_arena(), 8, 0, nullptr),
 662       _unstable_if_traps(comp_arena(), 8, 0, nullptr),
 663       _coarsened_locks(comp_arena(), 8, 0, nullptr),
 664       _congraph(nullptr),
 665       NOT_PRODUCT(_igv_printer(nullptr) COMMA)
 666           _unique(0),
 667       _dead_node_count(0),

 907       _compile_id(0),
 908       _options(Options::for_runtime_stub()),
 909       _method(nullptr),
 910       _entry_bci(InvocationEntryBci),
 911       _stub_function(stub_function),
 912       _stub_name(stub_name),
 913       _stub_entry_point(nullptr),
 914       _max_node_limit(MaxNodeLimit),
 915       _post_loop_opts_phase(false),
 916       _merge_stores_phase(false),
 917       _allow_macro_nodes(true),
 918       _inlining_progress(false),
 919       _inlining_incrementally(false),
 920       _has_reserved_stack_access(false),
 921 #ifndef PRODUCT
 922       _igv_idx(0),
 923       _trace_opto_output(directive->TraceOptoOutputOption),
 924 #endif
 925       _has_method_handle_invokes(false),
 926       _clinit_barrier_on_entry(false),

 927       _stress_seed(0),
 928       _comp_arena(mtCompiler, Arena::Tag::tag_comp),
 929       _barrier_set_state(BarrierSet::barrier_set()->barrier_set_c2()->create_barrier_state(comp_arena())),
 930       _env(ci_env),
 931       _directive(directive),
 932       _log(ci_env->log()),
 933       _first_failure_details(nullptr),
 934       _for_post_loop_igvn(comp_arena(), 8, 0, nullptr),
 935       _for_merge_stores_igvn(comp_arena(), 8, 0, nullptr),
 936       _congraph(nullptr),
 937       NOT_PRODUCT(_igv_printer(nullptr) COMMA)
 938           _unique(0),
 939       _dead_node_count(0),
 940       _dead_node_list(comp_arena()),
 941       _node_arena_one(mtCompiler, Arena::Tag::tag_node),
 942       _node_arena_two(mtCompiler, Arena::Tag::tag_node),
 943       _node_arena(&_node_arena_one),
 944       _mach_constant_base_node(nullptr),
 945       _Compile_types(mtCompiler, Arena::Tag::tag_type),
 946       _initial_gvn(nullptr),

1065   set_do_scheduling(OptoScheduling);
1066 
1067   set_do_vector_loop(false);
1068   set_has_monitors(false);
1069   set_has_scoped_access(false);
1070 
1071   if (AllowVectorizeOnDemand) {
1072     if (has_method() && _directive->VectorizeOption) {
1073       set_do_vector_loop(true);
1074       NOT_PRODUCT(if (do_vector_loop() && Verbose) {tty->print("Compile::Init: do vectorized loops (SIMD like) for method %s\n",  method()->name()->as_quoted_ascii());})
1075     } else if (has_method() && method()->name() != nullptr &&
1076                method()->intrinsic_id() == vmIntrinsics::_forEachRemaining) {
1077       set_do_vector_loop(true);
1078     }
1079   }
1080   set_use_cmove(UseCMoveUnconditionally /* || do_vector_loop()*/); // TODO: consider whether do_vector_loop() should mandate use_cmove unconditionally
1081   NOT_PRODUCT(if (use_cmove() && Verbose && has_method()) {tty->print("Compile::Init: use CMove without profitability tests for method %s\n",  method()->name()->as_quoted_ascii());})
1082 
1083   _max_node_limit = _directive->MaxNodeLimitOption;
1084 
1085   if (VM_Version::supports_fast_class_init_checks() && has_method() && !is_osr_compilation() && method()->needs_clinit_barrier()) {

1086     set_clinit_barrier_on_entry(true);



1087   }
1088   if (debug_info()->recording_non_safepoints()) {
1089     set_node_note_array(new(comp_arena()) GrowableArray<Node_Notes*>
1090                         (comp_arena(), 8, 0, nullptr));
1091     set_default_node_notes(Node_Notes::make(this));
1092   }
1093 
1094   const int grow_ats = 16;
1095   _max_alias_types = grow_ats;
1096   _alias_types   = NEW_ARENA_ARRAY(comp_arena(), AliasType*, grow_ats);
1097   AliasType* ats = NEW_ARENA_ARRAY(comp_arena(), AliasType,  grow_ats);
1098   Copy::zero_to_bytes(ats, sizeof(AliasType)*grow_ats);
1099   {
1100     for (int i = 0; i < grow_ats; i++)  _alias_types[i] = &ats[i];
1101   }
1102   // Initialize the first few types.
1103   _alias_types[AliasIdxTop]->Init(AliasIdxTop, nullptr);
1104   _alias_types[AliasIdxBot]->Init(AliasIdxBot, TypePtr::BOTTOM);
1105   _alias_types[AliasIdxRaw]->Init(AliasIdxRaw, TypeRawPtr::BOTTOM);
1106   _num_alias_types = AliasIdxRaw+1;

4094           dead_nodes.push(in);
4095         }
4096       }
4097       m->disconnect_inputs(this);
4098     }
4099   }
4100 
4101   set_java_calls(frc.get_java_call_count());
4102   set_inner_loops(frc.get_inner_loop_count());
4103 
4104   // No infinite loops, no reason to bail out.
4105   return false;
4106 }
4107 
4108 //-----------------------------too_many_traps----------------------------------
4109 // Report if there are too many traps at the current method and bci.
4110 // Return true if there was a trap, and/or PerMethodTrapLimit is exceeded.
4111 bool Compile::too_many_traps(ciMethod* method,
4112                              int bci,
4113                              Deoptimization::DeoptReason reason) {







4114   ciMethodData* md = method->method_data();
4115   if (md->is_empty()) {
4116     // Assume the trap has not occurred, or that it occurred only
4117     // because of a transient condition during start-up in the interpreter.
4118     return false;
4119   }
4120   ciMethod* m = Deoptimization::reason_is_speculate(reason) ? this->method() : nullptr;
4121   if (md->has_trap_at(bci, m, reason) != 0) {
4122     // Assume PerBytecodeTrapLimit==0, for a more conservative heuristic.
4123     // Also, if there are multiple reasons, or if there is no per-BCI record,
4124     // assume the worst.
4125     if (log())
4126       log()->elem("observe trap='%s' count='%d'",
4127                   Deoptimization::trap_reason_name(reason),
4128                   md->trap_count(reason));
4129     return true;
4130   } else {
4131     // Ignore method/bci and see if there have been too many globally.
4132     return too_many_traps(reason, md);
4133   }
4134 }
4135 
4136 // Less-accurate variant which does not require a method and bci.
4137 bool Compile::too_many_traps(Deoptimization::DeoptReason reason,
4138                              ciMethodData* logmd) {




4139   if (trap_count(reason) >= Deoptimization::per_method_trap_limit(reason)) {
4140     // Too many traps globally.
4141     // Note that we use cumulative trap_count, not just md->trap_count.
4142     if (log()) {
4143       int mcount = (logmd == nullptr)? -1: (int)logmd->trap_count(reason);
4144       log()->elem("observe trap='%s' count='0' mcount='%d' ccount='%d'",
4145                   Deoptimization::trap_reason_name(reason),
4146                   mcount, trap_count(reason));
4147     }
4148     return true;
4149   } else {
4150     // The coast is clear.
4151     return false;
4152   }
4153 }
4154 
4155 //--------------------------too_many_recompiles--------------------------------
4156 // Report if there are too many recompiles at the current method and bci.
4157 // Consults PerBytecodeRecompilationCutoff and PerMethodRecompilationCutoff.
4158 // Is not eager to return true, since this will cause the compiler to use

4208   _allowed_reasons = 0;
4209   if (is_method_compilation()) {
4210     for (int rs = (int)Deoptimization::Reason_none+1; rs < Compile::trapHistLength; rs++) {
4211       assert(rs < BitsPerInt, "recode bit map");
4212       if (!too_many_traps((Deoptimization::DeoptReason) rs)) {
4213         _allowed_reasons |= nth_bit(rs);
4214       }
4215     }
4216   }
4217 }
4218 
4219 bool Compile::needs_clinit_barrier(ciMethod* method, ciMethod* accessing_method) {
4220   return method->is_static() && needs_clinit_barrier(method->holder(), accessing_method);
4221 }
4222 
4223 bool Compile::needs_clinit_barrier(ciField* field, ciMethod* accessing_method) {
4224   return field->is_static() && needs_clinit_barrier(field->holder(), accessing_method);
4225 }
4226 
4227 bool Compile::needs_clinit_barrier(ciInstanceKlass* holder, ciMethod* accessing_method) {
4228   if (holder->is_initialized()) {
4229     return false;
4230   }
4231   if (holder->is_being_initialized()) {
4232     if (accessing_method->holder() == holder) {
 4233       // Access inside a class. The barrier can be elided when access happens in <clinit>,
 4234       // <init>, or a static method. In all those cases, an initialization barrier on the
 4235       // holder klass has already been passed.
4236       if (accessing_method->is_static_initializer() ||
4237           accessing_method->is_object_initializer() ||
4238           accessing_method->is_static()) {
4239         return false;
4240       }
4241     } else if (accessing_method->holder()->is_subclass_of(holder)) {
4242       // Access from a subclass. The barrier can be elided only when access happens in <clinit>.
 4243       // In case of <init> or a static method, a barrier on the subclass is not enough:
 4244       // a child class can become fully initialized while its parent class is still being initialized.
4245       if (accessing_method->is_static_initializer()) {
4246         return false;
4247       }
4248     }
4249     ciMethod* root = method(); // the root method of compilation
4250     if (root != accessing_method) {
4251       return needs_clinit_barrier(holder, root); // check access in the context of compilation root

4511 Node* Compile::constrained_convI2L(PhaseGVN* phase, Node* value, const TypeInt* itype, Node* ctrl, bool carry_dependency) {
4512   if (ctrl != nullptr) {
4513     // Express control dependency by a CastII node with a narrow type.
4514     // Make the CastII node dependent on the control input to prevent the narrowed ConvI2L
4515     // node from floating above the range check during loop optimizations. Otherwise, the
4516     // ConvI2L node may be eliminated independently of the range check, causing the data path
4517     // to become TOP while the control path is still there (although it's unreachable).
4518     value = new CastIINode(ctrl, value, itype, carry_dependency ? ConstraintCastNode::StrongDependency : ConstraintCastNode::RegularDependency, true /* range check dependency */);
4519     value = phase->transform(value);
4520   }
4521   const TypeLong* ltype = TypeLong::make(itype->_lo, itype->_hi, itype->_widen);
4522   return phase->transform(new ConvI2LNode(value, ltype));
4523 }
4524 
4525 void Compile::dump_print_inlining() {
4526   inline_printer()->print_on(tty);
4527 }
4528 
4529 void Compile::log_late_inline(CallGenerator* cg) {
4530   if (log() != nullptr) {
4531     log()->head("late_inline method='%d'  inline_id='" JLONG_FORMAT "'", log()->identify(cg->method()),
4532                 cg->unique_id());
4533     JVMState* p = cg->call_node()->jvms();
4534     while (p != nullptr) {
4535       log()->elem("jvms bci='%d' method='%d'", p->bci(), log()->identify(p->method()));
4536       p = p->caller();
4537     }
4538     log()->tail("late_inline");
4539   }
4540 }
4541 
4542 void Compile::log_late_inline_failure(CallGenerator* cg, const char* msg) {
4543   log_late_inline(cg);
4544   if (log() != nullptr) {
4545     log()->inline_fail(msg);
4546   }
4547 }
4548 
4549 void Compile::log_inline_id(CallGenerator* cg) {
4550   if (log() != nullptr) {
4551     // The LogCompilation tool needs a unique way to identify late

 628       _method(target),
 629       _entry_bci(osr_bci),
 630       _ilt(nullptr),
 631       _stub_function(nullptr),
 632       _stub_name(nullptr),
 633       _stub_entry_point(nullptr),
 634       _max_node_limit(MaxNodeLimit),
 635       _post_loop_opts_phase(false),
 636       _merge_stores_phase(false),
 637       _allow_macro_nodes(true),
 638       _inlining_progress(false),
 639       _inlining_incrementally(false),
 640       _do_cleanup(false),
 641       _has_reserved_stack_access(target->has_reserved_stack_access()),
 642 #ifndef PRODUCT
 643       _igv_idx(0),
 644       _trace_opto_output(directive->TraceOptoOutputOption),
 645 #endif
 646       _has_method_handle_invokes(false),
 647       _clinit_barrier_on_entry(false),
 648       _has_clinit_barriers(false),
 649       _stress_seed(0),
 650       _comp_arena(mtCompiler, Arena::Tag::tag_comp),
 651       _barrier_set_state(BarrierSet::barrier_set()->barrier_set_c2()->create_barrier_state(comp_arena())),
 652       _env(ci_env),
 653       _directive(directive),
 654       _log(ci_env->log()),
 655       _first_failure_details(nullptr),
 656       _intrinsics(comp_arena(), 0, 0, nullptr),
 657       _macro_nodes(comp_arena(), 8, 0, nullptr),
 658       _parse_predicates(comp_arena(), 8, 0, nullptr),
 659       _template_assertion_predicate_opaques(comp_arena(), 8, 0, nullptr),
 660       _expensive_nodes(comp_arena(), 8, 0, nullptr),
 661       _for_post_loop_igvn(comp_arena(), 8, 0, nullptr),
 662       _for_merge_stores_igvn(comp_arena(), 8, 0, nullptr),
 663       _unstable_if_traps(comp_arena(), 8, 0, nullptr),
 664       _coarsened_locks(comp_arena(), 8, 0, nullptr),
 665       _congraph(nullptr),
 666       NOT_PRODUCT(_igv_printer(nullptr) COMMA)
 667           _unique(0),
 668       _dead_node_count(0),

 908       _compile_id(0),
 909       _options(Options::for_runtime_stub()),
 910       _method(nullptr),
 911       _entry_bci(InvocationEntryBci),
 912       _stub_function(stub_function),
 913       _stub_name(stub_name),
 914       _stub_entry_point(nullptr),
 915       _max_node_limit(MaxNodeLimit),
 916       _post_loop_opts_phase(false),
 917       _merge_stores_phase(false),
 918       _allow_macro_nodes(true),
 919       _inlining_progress(false),
 920       _inlining_incrementally(false),
 921       _has_reserved_stack_access(false),
 922 #ifndef PRODUCT
 923       _igv_idx(0),
 924       _trace_opto_output(directive->TraceOptoOutputOption),
 925 #endif
 926       _has_method_handle_invokes(false),
 927       _clinit_barrier_on_entry(false),
 928       _has_clinit_barriers(false),
 929       _stress_seed(0),
 930       _comp_arena(mtCompiler, Arena::Tag::tag_comp),
 931       _barrier_set_state(BarrierSet::barrier_set()->barrier_set_c2()->create_barrier_state(comp_arena())),
 932       _env(ci_env),
 933       _directive(directive),
 934       _log(ci_env->log()),
 935       _first_failure_details(nullptr),
 936       _for_post_loop_igvn(comp_arena(), 8, 0, nullptr),
 937       _for_merge_stores_igvn(comp_arena(), 8, 0, nullptr),
 938       _congraph(nullptr),
 939       NOT_PRODUCT(_igv_printer(nullptr) COMMA)
 940           _unique(0),
 941       _dead_node_count(0),
 942       _dead_node_list(comp_arena()),
 943       _node_arena_one(mtCompiler, Arena::Tag::tag_node),
 944       _node_arena_two(mtCompiler, Arena::Tag::tag_node),
 945       _node_arena(&_node_arena_one),
 946       _mach_constant_base_node(nullptr),
 947       _Compile_types(mtCompiler, Arena::Tag::tag_type),
 948       _initial_gvn(nullptr),

1067   set_do_scheduling(OptoScheduling);
1068 
1069   set_do_vector_loop(false);
1070   set_has_monitors(false);
1071   set_has_scoped_access(false);
1072 
1073   if (AllowVectorizeOnDemand) {
1074     if (has_method() && _directive->VectorizeOption) {
1075       set_do_vector_loop(true);
1076       NOT_PRODUCT(if (do_vector_loop() && Verbose) {tty->print("Compile::Init: do vectorized loops (SIMD like) for method %s\n",  method()->name()->as_quoted_ascii());})
1077     } else if (has_method() && method()->name() != nullptr &&
1078                method()->intrinsic_id() == vmIntrinsics::_forEachRemaining) {
1079       set_do_vector_loop(true);
1080     }
1081   }
1082   set_use_cmove(UseCMoveUnconditionally /* || do_vector_loop()*/); // TODO: consider whether do_vector_loop() should mandate use_cmove unconditionally
1083   NOT_PRODUCT(if (use_cmove() && Verbose && has_method()) {tty->print("Compile::Init: use CMove without profitability tests for method %s\n",  method()->name()->as_quoted_ascii());})
1084 
1085   _max_node_limit = _directive->MaxNodeLimitOption;
1086 
1087   if (VM_Version::supports_fast_class_init_checks() && has_method() && !is_osr_compilation() &&
1088       (method()->needs_clinit_barrier() || (do_clinit_barriers() && method()->is_static()))) {
1089     set_clinit_barrier_on_entry(true);
1090     if (do_clinit_barriers()) {
1091       set_has_clinit_barriers(true); // Entry clinit barrier is in prolog code.
1092     }
1093   }
1094   if (debug_info()->recording_non_safepoints()) {
1095     set_node_note_array(new(comp_arena()) GrowableArray<Node_Notes*>
1096                         (comp_arena(), 8, 0, nullptr));
1097     set_default_node_notes(Node_Notes::make(this));
1098   }
1099 
1100   const int grow_ats = 16;
1101   _max_alias_types = grow_ats;
1102   _alias_types   = NEW_ARENA_ARRAY(comp_arena(), AliasType*, grow_ats);
1103   AliasType* ats = NEW_ARENA_ARRAY(comp_arena(), AliasType,  grow_ats);
1104   Copy::zero_to_bytes(ats, sizeof(AliasType)*grow_ats);
1105   {
1106     for (int i = 0; i < grow_ats; i++)  _alias_types[i] = &ats[i];
1107   }
1108   // Initialize the first few types.
1109   _alias_types[AliasIdxTop]->Init(AliasIdxTop, nullptr);
1110   _alias_types[AliasIdxBot]->Init(AliasIdxBot, TypePtr::BOTTOM);
1111   _alias_types[AliasIdxRaw]->Init(AliasIdxRaw, TypeRawPtr::BOTTOM);
1112   _num_alias_types = AliasIdxRaw+1;

4100           dead_nodes.push(in);
4101         }
4102       }
4103       m->disconnect_inputs(this);
4104     }
4105   }
4106 
4107   set_java_calls(frc.get_java_call_count());
4108   set_inner_loops(frc.get_inner_loop_count());
4109 
4110   // No infinite loops, no reason to bail out.
4111   return false;
4112 }
4113 
4114 //-----------------------------too_many_traps----------------------------------
4115 // Report if there are too many traps at the current method and bci.
4116 // Return true if there was a trap, and/or PerMethodTrapLimit is exceeded.
4117 bool Compile::too_many_traps(ciMethod* method,
4118                              int bci,
4119                              Deoptimization::DeoptReason reason) {
4120   if (method->has_trap_at(bci)) {
4121     return true;
4122   }
4123   if (PreloadReduceTraps && for_preload()) {
4124     // Preload code should not have traps, if possible.
4125     return true;
4126   }
4127   ciMethodData* md = method->method_data();
4128   if (md->is_empty()) {
4129     // Assume the trap has not occurred, or that it occurred only
4130     // because of a transient condition during start-up in the interpreter.
4131     return false;
4132   }
4133   ciMethod* m = Deoptimization::reason_is_speculate(reason) ? this->method() : nullptr;
4134   if (md->has_trap_at(bci, m, reason) != 0) {
4135     // Assume PerBytecodeTrapLimit==0, for a more conservative heuristic.
4136     // Also, if there are multiple reasons, or if there is no per-BCI record,
4137     // assume the worst.
4138     if (log())
4139       log()->elem("observe trap='%s' count='%d'",
4140                   Deoptimization::trap_reason_name(reason),
4141                   md->trap_count(reason));
4142     return true;
4143   } else {
4144     // Ignore method/bci and see if there have been too many globally.
4145     return too_many_traps(reason, md);
4146   }
4147 }
4148 
4149 // Less-accurate variant which does not require a method and bci.
4150 bool Compile::too_many_traps(Deoptimization::DeoptReason reason,
4151                              ciMethodData* logmd) {
4152   if (PreloadReduceTraps && for_preload()) {
4153     // Preload code should not have traps, if possible.
4154     return true;
4155   }
4156   if (trap_count(reason) >= Deoptimization::per_method_trap_limit(reason)) {
4157     // Too many traps globally.
4158     // Note that we use cumulative trap_count, not just md->trap_count.
4159     if (log()) {
4160       int mcount = (logmd == nullptr)? -1: (int)logmd->trap_count(reason);
4161       log()->elem("observe trap='%s' count='0' mcount='%d' ccount='%d'",
4162                   Deoptimization::trap_reason_name(reason),
4163                   mcount, trap_count(reason));
4164     }
4165     return true;
4166   } else {
4167     // The coast is clear.
4168     return false;
4169   }
4170 }
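
The two overloads above form a layered heuristic: a per-bci record in the method data (or, with PreloadReduceTraps, the for_preload() check) answers first, and only when that is inconclusive is the cumulative per-reason count compared against the per-method limit. Below is a minimal standalone sketch of that decision order, using hypothetical names and a simplified profile structure rather than the C2/ci types.

#include <cstdio>
#include <map>
#include <string>
#include <utility>

// Hypothetical, simplified stand-in for ciMethodData: per-site and per-reason trap counts.
struct MethodDataModel {
  std::map<std::pair<int, std::string>, int> per_site;  // traps recorded at a specific bci
  std::map<std::string, int> per_reason;                // cumulative traps per reason
  bool empty() const { return per_site.empty() && per_reason.empty(); }
};

const int kPerMethodTrapLimit = 100;  // assumed stand-in for Deoptimization::per_method_trap_limit()

bool too_many_traps_model(const MethodDataModel& md, int bci, const std::string& reason) {
  if (md.empty()) return false;       // no profile yet: assume the trap never fired
  auto site = md.per_site.find({bci, reason});
  if (site != md.per_site.end() && site->second > 0) {
    return true;                      // a trap recorded at this exact site: be conservative
  }
  auto total = md.per_reason.find(reason);   // otherwise fall back to the cumulative count
  return total != md.per_reason.end() && total->second >= kPerMethodTrapLimit;
}

int main() {
  MethodDataModel md;
  md.per_site[{42, "null_check"}] = 3;
  md.per_reason["null_check"] = 3;
  std::printf("%d %d\n",
              too_many_traps_model(md, 42, "null_check"),   // 1: per-site record wins
              too_many_traps_model(md,  7, "null_check"));  // 0: below the global limit
}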
4171 
4172 //--------------------------too_many_recompiles--------------------------------
4173 // Report if there are too many recompiles at the current method and bci.
4174 // Consults PerBytecodeRecompilationCutoff and PerMethodRecompilationCutoff.
4175 // Is not eager to return true, since this will cause the compiler to use

4225   _allowed_reasons = 0;
4226   if (is_method_compilation()) {
4227     for (int rs = (int)Deoptimization::Reason_none+1; rs < Compile::trapHistLength; rs++) {
4228       assert(rs < BitsPerInt, "recode bit map");
4229       if (!too_many_traps((Deoptimization::DeoptReason) rs)) {
4230         _allowed_reasons |= nth_bit(rs);
4231       }
4232     }
4233   }
4234 }
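
The loop above folds the per-reason verdicts into a single bitmask so that later queries reduce to one bit test. A standalone sketch of the same pattern, with a hypothetical reason enum in place of Deoptimization::DeoptReason:

#include <cstdio>

// Hypothetical deopt reasons; HotSpot uses Deoptimization::DeoptReason and Compile::trapHistLength.
enum Reason { Reason_none = 0, Reason_null_check, Reason_range_check, Reason_unreached, Reason_LIMIT };

static unsigned nth_bit(int n) { return 1u << n; }         // same idea as HotSpot's nth_bit

// Pretend per-reason verdict; stands in for Compile::too_many_traps(reason).
static bool too_many_traps(Reason r) { return r == Reason_range_check; }

int main() {
  unsigned allowed_reasons = 0;
  for (int rs = Reason_none + 1; rs < Reason_LIMIT; rs++) {
    if (!too_many_traps((Reason) rs)) {
      allowed_reasons |= nth_bit(rs);                       // speculation on reason rs is still allowed
    }
  }
  // Later, one bit test answers "may we still emit an uncommon trap for this reason?"
  std::printf("null_check allowed:  %d\n", (allowed_reasons & nth_bit(Reason_null_check))  != 0);
  std::printf("range_check allowed: %d\n", (allowed_reasons & nth_bit(Reason_range_check)) != 0);
}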
4235 
4236 bool Compile::needs_clinit_barrier(ciMethod* method, ciMethod* accessing_method) {
4237   return method->is_static() && needs_clinit_barrier(method->holder(), accessing_method);
4238 }
4239 
4240 bool Compile::needs_clinit_barrier(ciField* field, ciMethod* accessing_method) {
4241   return field->is_static() && needs_clinit_barrier(field->holder(), accessing_method);
4242 }
4243 
4244 bool Compile::needs_clinit_barrier(ciInstanceKlass* holder, ciMethod* accessing_method) {
4245   if (holder->is_initialized() && !do_clinit_barriers()) {
4246     return false;
4247   }
4248   if (holder->is_being_initialized() || do_clinit_barriers()) {
4249     if (accessing_method->holder() == holder) {
 4250       // Access inside a class. The barrier can be elided when access happens in <clinit>,
 4251       // <init>, or a static method. In all those cases, an initialization barrier on the
 4252       // holder klass has already been passed.
4253       if (accessing_method->is_static_initializer() ||
4254           accessing_method->is_object_initializer() ||
4255           accessing_method->is_static()) {
4256         return false;
4257       }
4258     } else if (accessing_method->holder()->is_subclass_of(holder)) {
4259       // Access from a subclass. The barrier can be elided only when access happens in <clinit>.
 4260       // In case of <init> or a static method, a barrier on the subclass is not enough:
 4261       // a child class can become fully initialized while its parent class is still being initialized.
4262       if (accessing_method->is_static_initializer()) {
4263         return false;
4264       }
4265     }
4266     ciMethod* root = method(); // the root method of compilation
4267     if (root != accessing_method) {
4268       return needs_clinit_barrier(holder, root); // check access in the context of compilation root

4528 Node* Compile::constrained_convI2L(PhaseGVN* phase, Node* value, const TypeInt* itype, Node* ctrl, bool carry_dependency) {
4529   if (ctrl != nullptr) {
4530     // Express control dependency by a CastII node with a narrow type.
4531     // Make the CastII node dependent on the control input to prevent the narrowed ConvI2L
4532     // node from floating above the range check during loop optimizations. Otherwise, the
4533     // ConvI2L node may be eliminated independently of the range check, causing the data path
4534     // to become TOP while the control path is still there (although it's unreachable).
4535     value = new CastIINode(ctrl, value, itype, carry_dependency ? ConstraintCastNode::StrongDependency : ConstraintCastNode::RegularDependency, true /* range check dependency */);
4536     value = phase->transform(value);
4537   }
4538   const TypeLong* ltype = TypeLong::make(itype->_lo, itype->_hi, itype->_widen);
4539   return phase->transform(new ConvI2LNode(value, ltype));
4540 }
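
The comment in constrained_convI2L is the crux: after the range check the int value is known to lie in [itype->_lo, itype->_hi], so the int-to-long conversion may keep those bounds, but only while it cannot float above the check. A minimal standalone sketch of the bounds-preserving widening itself, with hypothetical range structs rather than C2's TypeInt/TypeLong:

#include <cassert>
#include <cstdint>

// Hypothetical value ranges standing in for C2's TypeInt and TypeLong.
struct IntRange  { int32_t lo, hi; };
struct LongRange { int64_t lo, hi; };

// Once a range check has proven the value lies in r, widening to long can
// carry the same bounds instead of the full 64-bit range.
LongRange widen_with_bounds(const IntRange& r) {
  return { (int64_t) r.lo, (int64_t) r.hi };
}

int main() {
  IntRange checked = { 0, 1023 };                 // e.g. an array index after its range check
  LongRange widened = widen_with_bounds(checked);
  assert(widened.lo == 0 && widened.hi == 1023);
  // If the conversion were hoisted above the range check, this narrow range
  // would no longer be justified; hence the control-dependent CastII above.
  return 0;
}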
4541 
4542 void Compile::dump_print_inlining() {
4543   inline_printer()->print_on(tty);
4544 }
4545 
4546 void Compile::log_late_inline(CallGenerator* cg) {
4547   if (log() != nullptr) {
4548     log()->head("late_inline method='%d' inline_id='" JLONG_FORMAT "'", log()->identify(cg->method()),
4549                 cg->unique_id());
4550     JVMState* p = cg->call_node()->jvms();
4551     while (p != nullptr) {
4552       log()->elem("jvms bci='%d' method='%d'", p->bci(), log()->identify(p->method()));
4553       p = p->caller();
4554     }
4555     log()->tail("late_inline");
4556   }
4557 }
4558 
4559 void Compile::log_late_inline_failure(CallGenerator* cg, const char* msg) {
4560   log_late_inline(cg);
4561   if (log() != nullptr) {
4562     log()->inline_fail(msg);
4563   }
4564 }
4565 
4566 void Compile::log_inline_id(CallGenerator* cg) {
4567   if (log() != nullptr) {
4568     // The LogCompilation tool needs a unique way to identify late