/*
 * Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "ci/ciReplay.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compilationPolicy.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compilerEvent.hpp"
#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "jfr/jfrEvents.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/callGenerator.hpp"
#include "opto/parse.hpp"
#include "runtime/handles.inline.hpp"
#include "utilities/events.hpp"

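// This file implements C2's inlining policy. An InlineTree mirrors the call
// tree rooted at the method being compiled: each node records one
// (potentially) inlined call site, and the should_inline()/should_not_inline()
// filters below decide whether a callee is actually parsed into the caller.
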
//=============================================================================
//------------------------------InlineTree-------------------------------------
InlineTree::InlineTree(Compile* c,
                       const InlineTree *caller_tree, ciMethod* callee,
                       JVMState* caller_jvms, int caller_bci,
                       int max_inline_level) :
  C(c),
  _caller_jvms(nullptr),
  _method(callee),
  _late_inline(false),
  _caller_tree((InlineTree*) caller_tree),
  _count_inline_bcs(method()->code_size_for_inlining()),
  _max_inline_level(max_inline_level),
  _subtrees(c->comp_arena(), 2, 0, nullptr),
  _msg(nullptr)
{
#ifndef PRODUCT
  _count_inlines = 0;
  _forced_inline = false;
#endif
  if (caller_jvms != nullptr) {
    // Keep a private copy of the caller_jvms:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
    assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  }
  assert((caller_tree == nullptr ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  // Update hierarchical counts, count_inline_bcs() and count_inlines()
  InlineTree* caller = (InlineTree*)caller_tree;
  for (; caller != nullptr; caller = (InlineTree*)caller->caller_tree()) {
    caller->_count_inline_bcs += count_inline_bcs();
    NOT_PRODUCT(caller->_count_inlines++;)
  }
}

/**
 *  Return true when EA is on and a Java constructor is called, or
 *  a super constructor is called from an inlined Java constructor.
 *  Also return true for boxing methods.
 *  Also return true for methods returning Iterator (including Iterable::iterator()),
 *  which is essential for for-each loop performance.
 */
static bool is_init_with_ea(ciMethod* callee_method,
                            ciMethod* caller_method, Compile* C) {
  if (!C->do_escape_analysis() || !EliminateAllocations) {
    return false; // EA is off
  }
  if (callee_method->is_object_constructor()) {
    return true; // constructor
  }
  if ((caller_method->is_object_constructor() || caller_method->is_class_initializer()) &&
      caller_method != C->method() &&
      caller_method->holder()->is_subclass_of(callee_method->holder())) {
    return true; // super constructor is called from inlined constructor
  }
  if (C->eliminate_boxing() && callee_method->is_boxing_method()) {
    return true;
  }
  ciType* retType = callee_method->signature()->return_type();
  ciKlass* iter = C->env()->Iterator_klass();
  if (retType->is_loaded() && iter->is_loaded() && retType->is_subtype_of(iter)) {
    return true;
  }
  return false;
}

/**
 *  Force inlining of unboxing accessors when boxing elimination is enabled.
 */
static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
  return C->eliminate_boxing() && callee_method->is_unboxing_method();
}

// positive filter: should callee be inlined?
bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
                               JVMState* caller_jvms, bool& should_delay, ciCallProfile& profile) {
  int caller_bci = caller_jvms->bci();
  // Allows targeted inlining
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    _forced_inline = true;
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    _forced_inline = true;
    return true;
  }

  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    _forced_inline = true;
    return true;
  }

  int size = callee_method->code_size_for_inlining();

  // Check for too many throws (and not too huge)
  if (callee_method->interpreter_throwout_count() > InlineThrowCount &&
      size < InlineThrowMaxSize) {
    if (Verbose) {
      outputStream* stream = C->inline_printer()->record(callee_method, caller_jvms, InliningResult::SUCCESS);
      stream->print("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    set_msg("many throws");
    return true;
  }

  int default_max_inline_size = C->max_inline_size();
  int inline_small_code_size  = InlineSmallCode / 4;
  int max_inline_size         = default_max_inline_size;

  int call_site_count  = caller_method->scale_count(profile.count());
  int invoke_count     = caller_method->interpreter_invocation_count();

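  // Estimate call site hotness: the profiled count at this call site
  // relative to the caller's total interpreter invocations.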
  assert(invoke_count != 0, "require invocation count greater than zero");
  double freq = (double)call_site_count / (double)invoke_count;

  // bump the max size if the call is frequent
  if ((freq >= InlineFrequencyRatio) ||
      is_unboxing_method(callee_method, C) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_inline_size = C->freq_inline_size();
    if (size <= max_inline_size && TraceFrequencyInlining) {
      outputStream* stream = C->inline_printer()->record(callee_method, caller_jvms, InliningResult::SUCCESS);
      stream->print("Inlined frequent method (freq=%lf):", freq);
    }
  } else {
    // Not hot.  Check for medium-sized pre-existing nmethod at cold sites.
    if (callee_method->has_compiled_code() &&
        callee_method->inline_instructions_size() > inline_small_code_size) {
      set_msg("already compiled into a medium method");
      return false;
    }
  }
  if (size > max_inline_size) {
    if (max_inline_size > default_max_inline_size) {
      set_msg("hot method too big");
    } else {
      set_msg("too big");
    }
    return false;
  }
  return true;
}


// negative filter: should callee NOT be inlined?
bool InlineTree::should_not_inline(ciMethod* callee_method, ciMethod* caller_method,
                                   int caller_bci, bool& should_delay, ciCallProfile& profile) {
  const char* fail_msg = nullptr;

  // First check all inlining restrictions which are required for correctness
  if (callee_method->is_abstract()) {
    fail_msg = "abstract method"; // note: we allow ik->is_abstract()
  } else if (!callee_method->holder()->is_initialized() &&
             // access allowed in the context of static initializer
             C->needs_clinit_barrier(callee_method->holder(), caller_method)) {
    fail_msg = "method holder not initialized";
  } else if (callee_method->is_native()) {
    fail_msg = "native method";
  } else if (callee_method->dont_inline()) {
    fail_msg = "don't inline by annotation";
  }

  // Don't inline a method that changes Thread.currentThread() except
  // into another method that is annotated @ChangesCurrentThread.
  if (callee_method->changes_current_thread()
      && !C->method()->changes_current_thread()) {
    fail_msg = "method changes current thread";
  }

  // one more inlining restriction
  if (fail_msg == nullptr && callee_method->has_unloaded_classes_in_signature()) {
    fail_msg = "unloaded signature classes";
  }

  if (fail_msg != nullptr) {
    set_msg(fail_msg);
    return true;
  }

  // ignore heuristic controls on inlining
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    return false;
  }

  if (C->directive()->should_not_inline(callee_method)) {
    set_msg("disallowed by CompileCommand");
    return true;
  }

  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    return false;
  }

  if (ciReplay::should_not_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (ciReplay::should_not_inline(callee_method)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    return false;
  }

  // Now perform checks which are heuristic

  if (is_unboxing_method(callee_method, C)) {
    // Inline unboxing methods.
    return false;
  }

  if (callee_method->has_compiled_code() &&
      callee_method->inline_instructions_size() > InlineSmallCode) {
    set_msg("already compiled into a big method");
    return true;
  }

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != nullptr &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree* top = this;
    while (top->caller_tree() != nullptr) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass())) {
      set_msg("exception method");
      return true;
    }
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size() <= MaxTrivialSize) {
    return false;
  }

  // don't use counts with -Xcomp
  if (UseInterpreter) {
    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      set_msg("never executed");
      return true;
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis: inline all executed constructors
      return false;
    }

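    // Reject call sites whose observed frequency falls below the floor;
    // the floor is at least one call per CompilationPolicy::min_invocations()
    // caller invocations.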
    if (MinInlineFrequencyRatio > 0) {
      int call_site_count  = caller_method->scale_count(profile.count());
      int invoke_count     = caller_method->interpreter_invocation_count();
      assert(invoke_count != 0, "require invocation count greater than zero");
      double freq = (double)call_site_count / (double)invoke_count;
      // avoid division by 0, set divisor to at least 1
      int cp_min_inv = MAX2(1, CompilationPolicy::min_invocations());
      double min_freq = MAX2(MinInlineFrequencyRatio, 1.0 / cp_min_inv);

      if (freq < min_freq) {
        set_msg("low call site frequency");
        return true;
      }
    }
  }

  return false;
}

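// Heuristically decide whether the call site at caller_bci has ever been
// reached: consult the profile first, then fall back to block layout
// (call sites in the method's start block are assumed reachable).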
bool InlineTree::is_not_reached(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile) {
  if (!UseInterpreter) {
    return false; // -Xcomp
  }
  if (profile.count() > 0) {
    return false; // reachable according to profile
  }
  if (!callee_method->was_executed_more_than(0)) {
    return true; // callee was never executed
  }
  if (caller_method->is_not_reached(caller_bci)) {
    return true; // call site not resolved
  }
  if (profile.count() == -1) {
    return false; // immature profile; optimistically treat as reached
  }
  assert(profile.count() == 0, "sanity");

  // Profile info is scarce.
  // Try to guess: check if the call site belongs to a start block.
  // Call sites in a start block should be reachable if no exception is thrown earlier.
  ciMethodBlocks* caller_blocks = caller_method->get_method_blocks();
  bool is_start_block = caller_blocks->block_containing(caller_bci)->start_bci() == 0;
  if (is_start_block) {
    return false; // treat the call as reached: it is part of the start block
  }
  return true; // give up and treat the call site as not reached
}

//-----------------------------try_to_inline-----------------------------------
// return true if ok
// Relocated from "InliningClosure::try_to_inline"
bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, JVMState* jvms, ciCallProfile& profile,
                               bool& should_delay) {

  if (ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  _forced_inline = false; // Reset

  // 'should_delay' can be overridden during replay compilation
  if (!should_inline(callee_method, caller_method, jvms, should_delay, profile)) {
    return false;
  }
  // 'should_delay' can be overridden during replay compilation
  if (should_not_inline(callee_method, caller_method, caller_bci, should_delay, profile)) {
    return false;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    set_msg("accessor");
    return true;
  }

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->over_inlining_cutoff()) {
      if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
          || !IncrementalInline) {
        set_msg("NodeCountInliningCutoff");
        return false;
      } else {
        should_delay = true;
      }
    }

    if (!UseInterpreter &&
        is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis stress testing when running Xcomp:
      // inline constructors even if they are not reached.
    } else if (forced_inline()) {
      // Inlining was forced by CompilerOracle, ciReplay or annotation
    } else if (is_not_reached(callee_method, caller_method, caller_bci, profile)) {
      // don't inline unreached call sites
      set_msg("call site not reached");
      return false;
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    set_msg("not an accessor");
    return false;
  }

  // Limit inlining depth in case inlining is forced or
  // _max_inline_level was increased to compensate for lambda forms.
  if (inline_level() > MaxForceInlineLevel) {
    set_msg("MaxForceInlineLevel");
    return false;
  }
  if (inline_level() > _max_inline_level) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("inlining too deep");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // detect direct and indirect recursive inlining
  {
    // count the current method and the callee
    const bool is_compiled_lambda_form = callee_method->is_compiled_lambda_form();
    int inline_level = 0;
    if (!is_compiled_lambda_form) {
      if (method() == callee_method) {
        inline_level++;
      }
    }
    // count callers of current method and callee
    Node* callee_argument0 = is_compiled_lambda_form ? jvms->map()->argument(jvms, 0)->uncast() : nullptr;
    for (JVMState* j = jvms->caller(); j != nullptr && j->has_method(); j = j->caller()) {
      if (j->method() == callee_method) {
        if (is_compiled_lambda_form) {
          // Since compiled lambda forms are heavily reused, we allow recursive inlining. If it is truly
          // a recursion (using the same "receiver"), we limit inlining; otherwise we can easily blow the
          // compiler stack.
          Node* caller_argument0 = j->map()->argument(j, 0)->uncast();
          if (caller_argument0 == callee_argument0) {
            inline_level++;
          }
        } else {
          inline_level++;
        }
      }
    }
    if (inline_level > MaxRecursiveInlineLevel) {
      set_msg("recursive inlining is too deep");
      return false;
    }
  }

  int size = callee_method->code_size_for_inlining();

  if (ClipInlining && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // ok, inline this method
  return true;
}

//------------------------------pass_initial_checks----------------------------
bool InlineTree::pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  // Check if a callee_method was suggested
  if (callee_method == nullptr) {
    return false;
  }
  ciInstanceKlass* callee_holder = callee_method->holder();
  // Check if the klass of callee_method is loaded
  if (!callee_holder->is_loaded()) {
    return false;
  }
  if (!callee_holder->is_initialized() &&
      // access allowed in the context of static initializer
      C->needs_clinit_barrier(callee_holder, caller_method)) {
    return false;
  }
  if (!UseInterpreter) /* running Xcomp */ {
    // Check that the constant pool's call site has been visited;
    // this is stricter than callee_holder->is_initialized()
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2();
      if (!caller_method->is_klass_loaded(index, call_bc, true)) {
        return false;
      }
      // Try to do constant pool resolution if running Xcomp
      if (!caller_method->check_call(index, call_bc == Bytecodes::_invokestatic)) {
        return false;
      }
    }
  }
  return true;
}

//------------------------------check_can_parse--------------------------------
const char* InlineTree::check_can_parse(ciMethod* callee) {
  // Certain methods cannot be parsed at all:
  if ( callee->is_native())                     return "native method";
  if ( callee->is_abstract())                   return "abstract method";
  if (!callee->has_balanced_monitors())         return "not compilable (unbalanced monitors)";
  if ( callee->get_flow_analysis()->failing())  return "not compilable (flow analysis failed)";
  if (!callee->can_be_parsed())                 return "cannot be parsed";
  return nullptr;
}

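// Report the inlining decision through every available channel: the
// CompileLog, unified logging, the inline printer, and (when enabled)
// a JFR CompilerInlining event.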
//------------------------------print_inlining---------------------------------
void InlineTree::print_inlining(ciMethod* callee_method, JVMState* jvm, bool success) const {
  int caller_bci = jvm->bci();
  ciMethod* caller_method = jvm->method();
  const char* inline_msg = msg();
  assert(inline_msg != nullptr, "just checking");
  if (C->log() != nullptr) {
    if (success) {
      C->log()->inline_success(inline_msg);
    } else {
      C->log()->inline_fail(inline_msg);
    }
  }
  CompileTask::print_inlining_ul(callee_method, inline_level(), caller_bci, inlining_result_of(success), inline_msg);
  C->inline_printer()->record(callee_method, jvm, inlining_result_of(success), inline_msg);
  EventCompilerInlining event;
  if (event.should_commit()) {
    guarantee(callee_method != nullptr, "would crash in CompilerEvent::InlineEvent::post");
    CompilerEvent::InlineEvent::post(event, C->compile_id(), caller_method->get_Method(), callee_method, success, inline_msg, caller_bci);
  }
}

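// Top-level inlining decision for a call site: run the initial and
// parseability checks, apply the policy in try_to_inline(), report the
// outcome, and on success attach a subtree for the callee. 'should_delay'
// is set when the decision is deferred to late (incremental) inlining.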
//------------------------------ok_to_inline-----------------------------------
bool InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile,
                              bool& should_delay) {
#ifdef ASSERT
  assert(callee_method != nullptr, "caller checks for optimized virtual!");
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == nullptr) {
    assert(_caller_jvms == nullptr, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  int         caller_bci    = jvms->bci();
  ciMethod*   caller_method = jvms->method();

  // Do some initial checks.
  if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
    set_msg("failed initial checks");
    print_inlining(callee_method, jvms, false /* !success */);
    return false;
  }

  // Do some parse checks.
  set_msg(check_can_parse(callee_method));
  if (msg() != nullptr) {
    print_inlining(callee_method, jvms, false /* !success */);
    return false;
  }

  // Check if inlining policy says no.
  bool success = try_to_inline(callee_method, caller_method, caller_bci, jvms, profile,
                               should_delay); // out
  if (success) {
    // Inline!
    if (msg() == nullptr) {
      set_msg("inline (hot)");
    }
    print_inlining(callee_method, jvms, true /* success */);
    InlineTree* callee_tree = build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (should_delay) {
      // Record late inlining decision in order to dump it for compiler replay
      callee_tree->set_late_inline();
    }
    return true;
  } else {
    // Do not inline
    if (msg() == nullptr) {
      set_msg("too cold to inline");
    }
    print_inlining(callee_method, jvms, false /* !success */);
    return false;
  }
}

//------------------------------build_inline_tree_for_callee-------------------
InlineTree* InlineTree::build_inline_tree_for_callee(ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != nullptr) {
    return old_ilt;
  }
  int max_inline_level_adjust = 0;
  if (caller_jvms->method() != nullptr) {
    if (caller_jvms->method()->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1;  // don't count actions in MH or indy adapter frames
    } else if (callee_method->is_method_handle_intrinsic() ||
               callee_method->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1;  // don't count method handle calls from java.lang.invoke implementation
    }
    if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) {
      C->inline_printer()->record(callee_method, caller_jvms, InliningResult::SUCCESS, " \\-> discounting inline depth");
    }
    if (max_inline_level_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  // Allocate in the comp_arena to make sure the InlineTree is live when dumping a replay compilation file
  InlineTree* ilt = new (C->comp_arena()) InlineTree(C, this, callee_method, caller_jvms, caller_bci, _max_inline_level + max_inline_level_adjust);
  _subtrees.append(ilt);

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}


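// Find an existing subtree for the given call site (bci) and callee,
// i.e. whether this exact call was already inlined here.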
//---------------------------------------callee_at-----------------------------
InlineTree* InlineTree::callee_at(int bci, ciMethod* callee) const {
  for (int i = 0; i < _subtrees.length(); i++) {
    InlineTree* sub = _subtrees.at(i);
    if (sub->caller_bci() == bci && callee == sub->method()) {
      return sub;
    }
  }
  return nullptr;
}


//------------------------------build_inline_tree_root-------------------------
InlineTree* InlineTree::build_inline_tree_root() {
  Compile* C = Compile::current();

  // Root of inline tree
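  // The root has no caller: null caller tree/jvms and an invalid bci of -1.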
  InlineTree* ilt = new InlineTree(C, nullptr, C->method(), nullptr, -1, MaxInlineLevel);

  return ilt;
}


//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
  InlineTree* iltp = root;
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp  = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (sub == nullptr) {
      if (d == depth) {
        sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      guarantee(sub != nullptr, "should be a sub-ilt here");
      return sub;
    }
    iltp = sub;
  }
  return iltp;
}

// Count number of nodes in this subtree
int InlineTree::count() const {
  int result = 1;
  for (int i = 0; i < _subtrees.length(); i++) {
    result += _subtrees.at(i)->count();
  }
  return result;
}

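// Dump this subtree for the replay compilation file: each node emits its
// inline depth, caller bci, and late-inline flag, followed by the method
// name in ASCII, then recurses into its subtrees.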
void InlineTree::dump_replay_data(outputStream* out, int depth_adjust) {
  out->print(" %d %d %d ", inline_level() + depth_adjust, caller_bci(), _late_inline);
  method()->dump_name_as_ascii(out);
  for (int i = 0; i < _subtrees.length(); i++) {
    _subtrees.at(i)->dump_replay_data(out, depth_adjust);
  }
}


#ifndef PRODUCT
void InlineTree::print_impl(outputStream* st, int indent) const {
  for (int i = 0; i < indent; i++) st->print(" ");
  st->print(" @ %d", caller_bci());
  method()->print_short_name(st);
  st->cr();

  for (int i = 0; i < _subtrees.length(); i++) {
    _subtrees.at(i)->print_impl(st, indent + 2);
  }
}

void InlineTree::print_value_on(outputStream* st) const {
  print_impl(st, 2);
}
#endif