/*
 * Copyright (c) 1998, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciReplay.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compilationPolicy.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compilerEvent.hpp"
#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "jfr/jfrEvents.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/callGenerator.hpp"
#include "opto/parse.hpp"
#include "runtime/handles.inline.hpp"
#include "utilities/events.hpp"

//=============================================================================
//------------------------------InlineTree-------------------------------------
InlineTree::InlineTree(Compile* c,
                       const InlineTree* caller_tree, ciMethod* callee,
                       JVMState* caller_jvms, int caller_bci,
                       int max_inline_level) :
  C(c),
  _caller_jvms(nullptr),
  _method(callee),
  _late_inline(false),
  _caller_tree((InlineTree*) caller_tree),
  _count_inline_bcs(method()->code_size_for_inlining()),
  _max_inline_level(max_inline_level),
  _subtrees(c->comp_arena(), 2, 0, nullptr),
  _msg(nullptr)
{
#ifndef PRODUCT
  _count_inlines = 0;
  _forced_inline = false;
#endif
  if (caller_jvms != nullptr) {
    // Keep a private copy of the caller_jvms:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
    assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  }
  assert((caller_tree == nullptr ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  // Update hierarchical counts, count_inline_bcs() and count_inlines()
  InlineTree* caller = (InlineTree*) caller_tree;
  for (; caller != nullptr; caller = (InlineTree*) caller->caller_tree()) {
    caller->_count_inline_bcs += count_inline_bcs();
    NOT_PRODUCT(caller->_count_inlines++;)
  }
}

/**
 *  Return true when EA is ON and a Java constructor is called, or
 *  a super constructor is called from an inlined Java constructor.
 *  Also return true for boxing methods.
 *  Also return true for methods returning Iterator (including Iterable::iterator()),
 *  which is essential for for-each loop performance.
 */
static bool is_init_with_ea(ciMethod* callee_method,
                            ciMethod* caller_method, Compile* C) {
  if (!C->do_escape_analysis() || !EliminateAllocations) {
    return false; // EA is off
  }
  if (callee_method->is_initializer()) {
    return true; // constructor
  }
  if (caller_method->is_initializer() &&
      caller_method != C->method() &&
      caller_method->holder()->is_subclass_of(callee_method->holder())) {
    return true; // super constructor is called from inlined constructor
  }
  if (C->eliminate_boxing() && callee_method->is_boxing_method()) {
    return true;
  }
  ciType* retType = callee_method->signature()->return_type();
  ciKlass* iter = C->env()->Iterator_klass();
  if (retType->is_loaded() && iter->is_loaded() && retType->is_subtype_of(iter)) {
    return true;
  }
  return false;
}

/**
 *  Force inlining of unboxing accessors.
 */
static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
  return C->eliminate_boxing() && callee_method->is_unboxing_method();
}

// positive filter: should callee be inlined?
bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, bool& should_delay, ciCallProfile& profile) {
  // Allows targeted inlining
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    _forced_inline = true;
    return true;
  }
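  // Illustrative usage (the method pattern is user-supplied, not fixed here):
  //   -XX:CompileCommand=inline,java/lang/String.indexOf
  // would match this directive and force the callee to be inlined.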

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    _forced_inline = true;
    return true;
  }
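  // force_inline() is set, e.g., for JDK-internal methods annotated with
  // jdk.internal.vm.annotation.ForceInline (@ForceInline).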

  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    _forced_inline = true;
    return true;
  }

  int size = callee_method->code_size_for_inlining();

  // Check for too many throws (and not too huge)
  if (callee_method->interpreter_throwout_count() > InlineThrowCount &&
      size < InlineThrowMaxSize) {
    if (C->print_inlining() && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    set_msg("many throws");
    return true;
  }

  int default_max_inline_size = C->max_inline_size();
  int inline_small_code_size  = InlineSmallCode / 4;
  int max_inline_size         = default_max_inline_size;

  int call_site_count  = caller_method->scale_count(profile.count());
  int invoke_count     = caller_method->interpreter_invocation_count();

  assert(invoke_count != 0, "require invocation count greater than zero");
  double freq = (double)call_site_count / (double)invoke_count;
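  // Worked example with hypothetical numbers: a call site profiled 500 times
  // inside a caller invoked 1000 times yields freq = 500/1000 = 0.5, which is
  // then compared against InlineFrequencyRatio below.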

  // bump the max size if the call is frequent
  if ((freq >= InlineFrequencyRatio) ||
      is_unboxing_method(callee_method, C) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_inline_size = C->freq_inline_size();
    if (size <= max_inline_size && TraceFrequencyInlining) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined frequent method (freq=%lf):", freq);
      CompileTask::print_inline_indent(inline_level());
      callee_method->print();
      tty->cr();
    }
  } else {
    // Not hot.  Check for medium-sized pre-existing nmethod at cold sites.
    if (callee_method->has_compiled_code() &&
        callee_method->inline_instructions_size() > inline_small_code_size) {
      set_msg("already compiled into a medium method");
      return false;
    }
  }
  if (size > max_inline_size) {
    if (max_inline_size > default_max_inline_size) {
      set_msg("hot method too big");
    } else {
      set_msg("too big");
    }
    return false;
  }
  return true;
}


// negative filter: should callee NOT be inlined?
bool InlineTree::should_not_inline(ciMethod* callee_method, ciMethod* caller_method,
                                   int caller_bci, bool& should_delay, ciCallProfile& profile) {
  const char* fail_msg = nullptr;

  // First check all inlining restrictions which are required for correctness
  if (callee_method->is_abstract()) {
    fail_msg = "abstract method"; // note: we allow ik->is_abstract()
  } else if (!callee_method->holder()->is_initialized() &&
             // access allowed in the context of static initializer
             C->needs_clinit_barrier(callee_method->holder(), caller_method)) {
    fail_msg = "method holder not initialized";
  } else if (callee_method->is_native()) {
    fail_msg = "native method";
  } else if (callee_method->dont_inline()) {
    fail_msg = "don't inline by annotation";
  }
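  // dont_inline() above is set, e.g., for methods annotated with
  // jdk.internal.vm.annotation.DontInline (@DontInline).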

  // Don't inline a method that changes Thread.currentThread() except
  // into another method that is annotated @ChangesCurrentThread.
  if (callee_method->changes_current_thread()
      && !C->method()->changes_current_thread()) {
    fail_msg = "method changes current thread";
  }

  // one more inlining restriction
  if (fail_msg == nullptr && callee_method->has_unloaded_classes_in_signature()) {
    fail_msg = "unloaded signature classes";
  }

  if (fail_msg != nullptr) {
    set_msg(fail_msg);
    return true;
  }

  // ignore heuristic controls on inlining
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    return false;
  }

  if (C->directive()->should_not_inline(callee_method)) {
    set_msg("disallowed by CompileCommand");
    return true;
  }

  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    return false;
  }

  if (ciReplay::should_not_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (ciReplay::should_not_inline(callee_method)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    return false;
  }

  // Now perform checks which are heuristic

  if (is_unboxing_method(callee_method, C)) {
    // Inline unboxing methods.
    return false;
  }

  if (callee_method->has_compiled_code() &&
      callee_method->inline_instructions_size() > InlineSmallCode) {
    set_msg("already compiled into a big method");
    return true;
  }

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != nullptr &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree* top = this;
    while (top->caller_tree() != nullptr) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass())) {
      set_msg("exception method");
      return true;
    }
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size() <= MaxTrivialSize) {
    return false;
  }

  // don't use counts with -Xcomp
  if (UseInterpreter) {
    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      set_msg("never executed");
      return true;
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis: inline all executed constructors
      return false;
    }

    if (MinInlineFrequencyRatio > 0) {
      int call_site_count  = caller_method->scale_count(profile.count());
      int invoke_count     = caller_method->interpreter_invocation_count();
      assert(invoke_count != 0, "require invocation count greater than zero");
      double freq = (double)call_site_count / (double)invoke_count;
      double min_freq = MAX2(MinInlineFrequencyRatio, 1.0 / CompilationPolicy::min_invocations());
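      // Hypothetical example: with MinInlineFrequencyRatio = 0.0025 and
      // CompilationPolicy::min_invocations() = 200, min_freq = MAX2(0.0025, 0.005)
      // = 0.005, so call sites executed on fewer than 0.5% of the caller's
      // invocations are rejected as too cold.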

      if (freq < min_freq) {
        set_msg("low call site frequency");
        return true;
      }
    }
  }

  return false;
}

bool InlineTree::is_not_reached(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile) {
  if (!UseInterpreter) {
    return false; // -Xcomp
  }
  if (profile.count() > 0) {
    return false; // reachable according to profile
  }
  if (!callee_method->was_executed_more_than(0)) {
    return true; // callee was never executed
  }
  if (caller_method->is_not_reached(caller_bci)) {
    return true; // call site not resolved
  }
  if (profile.count() == -1) {
    return false; // immature profile; optimistically treat as reached
  }
  assert(profile.count() == 0, "sanity");

  // Profile info is scarce.
  // Try to guess: check if the call site belongs to a start block.
  // Call sites in a start block should be reachable if no exception is thrown earlier.
  ciMethodBlocks* caller_blocks = caller_method->get_method_blocks();
  bool is_start_block = caller_blocks->block_containing(caller_bci)->start_bci() == 0;
  if (is_start_block) {
    return false; // treat the call as reached, since it is part of the start block
  }
  return true; // give up and treat the call site as not reached
}

//-----------------------------try_to_inline-----------------------------------
// return true if ok
// Relocated from "InliningClosure::try_to_inline"
bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, JVMState* jvms, ciCallProfile& profile,
                               bool& should_delay) {

  if (ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  _forced_inline = false; // Reset

  // 'should_delay' can be overridden during replay compilation
  if (!should_inline(callee_method, caller_method, caller_bci, should_delay, profile)) {
    return false;
  }
  // 'should_delay' can be overridden during replay compilation
  if (should_not_inline(callee_method, caller_method, caller_bci, should_delay, profile)) {
    return false;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    set_msg("accessor");
    return true;
  }
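  // (is_accessor() covers trivial field-access methods, e.g. a Java getter
  //  like "int getX() { return x; }".)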

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->over_inlining_cutoff()) {
      if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
          || !IncrementalInline) {
        set_msg("NodeCountInliningCutoff");
        return false;
      } else {
        should_delay = true;
      }
    }

    if (!UseInterpreter &&
        is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis stress testing when running Xcomp:
      // inline constructors even if they are not reached.
    } else if (forced_inline()) {
      // Inlining was forced by CompilerOracle, ciReplay or annotation
    } else if (is_not_reached(callee_method, caller_method, caller_bci, profile)) {
      // don't inline unreached call sites
      set_msg("call site not reached");
      return false;
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    set_msg("not an accessor");
    return false;
  }

  // Limit inlining depth in case inlining is forced or
  // _max_inline_level was increased to compensate for lambda forms.
  if (inline_level() > MaxForceInlineLevel) {
    set_msg("MaxForceInlineLevel");
    return false;
  }
  if (inline_level() > _max_inline_level) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("inlining too deep");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // detect direct and indirect recursive inlining
  {
    // count the current method and the callee
    const bool is_compiled_lambda_form = callee_method->is_compiled_lambda_form();
    int inline_level = 0;
    if (!is_compiled_lambda_form) {
      if (method() == callee_method) {
        inline_level++;
      }
    }
    // count callers of current method and callee
    Node* callee_argument0 = is_compiled_lambda_form ? jvms->map()->argument(jvms, 0)->uncast() : nullptr;
    for (JVMState* j = jvms->caller(); j != nullptr && j->has_method(); j = j->caller()) {
      if (j->method() == callee_method) {
        if (is_compiled_lambda_form) {
          // Since compiled lambda forms are heavily reused, we allow recursive inlining.
          // Only if it is truly a recursion (using the same "receiver") do we limit
          // inlining; otherwise we could easily blow the compiler stack.
          Node* caller_argument0 = j->map()->argument(j, 0)->uncast();
          if (caller_argument0 == callee_argument0) {
            inline_level++;
          }
        } else {
          inline_level++;
        }
      }
    }
    if (inline_level > MaxRecursiveInlineLevel) {
      set_msg("recursive inlining is too deep");
      return false;
    }
  }

  int size = callee_method->code_size_for_inlining();

  if (ClipInlining && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // ok, inline this method
  return true;
}

//------------------------------pass_initial_checks----------------------------
bool InlineTree::pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  // Check if a callee_method was suggested
  if (callee_method == nullptr) {
    return false;
  }
  ciInstanceKlass* callee_holder = callee_method->holder();
  // Check if klass of callee_method is loaded
  if (!callee_holder->is_loaded()) {
    return false;
  }
  if (!callee_holder->is_initialized() &&
      // access allowed in the context of static initializer
      C->needs_clinit_barrier(callee_holder, caller_method)) {
    return false;
  }
  if (!UseInterpreter) { // running Xcomp
    // Check that the constant pool's call site has been visited;
    // this is stricter than callee_holder->is_initialized().
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2();
      if (!caller_method->is_klass_loaded(index, call_bc, true)) {
        return false;
      }
      // Try to do constant pool resolution if running Xcomp
      if (!caller_method->check_call(index, call_bc == Bytecodes::_invokestatic)) {
        return false;
      }
    }
  }
  return true;
}

//------------------------------check_can_parse--------------------------------
const char* InlineTree::check_can_parse(ciMethod* callee) {
  // Certain methods cannot be parsed at all:
  if ( callee->is_native())                     return "native method";
  if ( callee->is_abstract())                   return "abstract method";
  if (!callee->has_balanced_monitors())         return "not compilable (unbalanced monitors)";
  if ( callee->get_flow_analysis()->failing())  return "not compilable (flow analysis failed)";
  if (!callee->can_be_parsed())                 return "cannot be parsed";
  return nullptr;
}

//------------------------------print_inlining---------------------------------
void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci,
                                ciMethod* caller_method, bool success) const {
  const char* inline_msg = msg();
  assert(inline_msg != nullptr, "just checking");
  if (C->log() != nullptr) {
    if (success) {
      C->log()->inline_success(inline_msg);
    } else {
      C->log()->inline_fail(inline_msg);
    }
  }
  CompileTask::print_inlining_ul(callee_method, inline_level(),
                                 caller_bci, inlining_result_of(success), inline_msg);
  if (C->print_inlining()) {
    C->print_inlining(callee_method, inline_level(), caller_bci, inlining_result_of(success), inline_msg);
    guarantee(callee_method != nullptr, "would crash in CompilerEvent::InlineEvent::post");
    if (Verbose) {
      const InlineTree* top = this;
      while (top->caller_tree() != nullptr) { top = top->caller_tree(); }
      //tty->print("  bcs: %d+%d  invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
    }
  }
  EventCompilerInlining event;
  if (event.should_commit()) {
    CompilerEvent::InlineEvent::post(event, C->compile_id(), caller_method->get_Method(), callee_method, success, inline_msg, caller_bci);
  }
}

//------------------------------ok_to_inline-----------------------------------
bool InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile,
                              bool& should_delay) {
#ifdef ASSERT
  assert(callee_method != nullptr, "caller checks for optimized virtual!");
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == nullptr) {
    assert(_caller_jvms == nullptr, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  int         caller_bci    = jvms->bci();
  ciMethod*   caller_method = jvms->method();

  // Do some initial checks.
  if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
    set_msg("failed initial checks");
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return false;
  }

  // Do some parse checks.
  set_msg(check_can_parse(callee_method));
  if (msg() != nullptr) {
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return false;
  }

  // Check if inlining policy says no.
  bool success = try_to_inline(callee_method, caller_method, caller_bci, jvms, profile,
                               should_delay); // out
  if (success) {
    // Inline!
    if (msg() == nullptr) {
      set_msg("inline (hot)");
    }
    print_inlining(callee_method, caller_bci, caller_method, true /* success */);
    InlineTree* callee_tree = build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (should_delay) {
      // Record late inlining decision in order to dump it for compiler replay
      callee_tree->set_late_inline();
    }
    return true;
  } else {
    // Do not inline
    if (msg() == nullptr) {
      set_msg("too cold to inline");
    }
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return false;
  }
}

//------------------------------build_inline_tree_for_callee-------------------
InlineTree* InlineTree::build_inline_tree_for_callee(ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != nullptr) {
    return old_ilt;
  }
  int max_inline_level_adjust = 0;
  if (caller_jvms->method() != nullptr) {
    if (caller_jvms->method()->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1;  // don't count actions in MH or indy adapter frames
    } else if (callee_method->is_method_handle_intrinsic() ||
               callee_method->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1;  // don't count method handle calls from java.lang.invoke implementation
    }
    if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr(" \\-> discounting inline depth");
    }
    if (max_inline_level_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  // Allocate in the comp_arena to make sure the InlineTree is live when dumping a replay compilation file
  InlineTree* ilt = new (C->comp_arena()) InlineTree(C, this, callee_method, caller_jvms, caller_bci, _max_inline_level + max_inline_level_adjust);
  _subtrees.append(ilt);

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}


//---------------------------------------callee_at-----------------------------
InlineTree* InlineTree::callee_at(int bci, ciMethod* callee) const {
  for (int i = 0; i < _subtrees.length(); i++) {
    InlineTree* sub = _subtrees.at(i);
    if (sub->caller_bci() == bci && callee == sub->method()) {
      return sub;
    }
  }
  return nullptr;
}


//------------------------------build_inline_tree_root-------------------------
InlineTree* InlineTree::build_inline_tree_root() {
  Compile* C = Compile::current();

  // Root of inline tree
  InlineTree* ilt = new InlineTree(C, nullptr, C->method(), nullptr, -1, MaxInlineLevel);

  return ilt;
}


//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
  InlineTree* iltp = root;
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp  = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (sub == nullptr) {
      if (d == depth) {
        sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      guarantee(sub != nullptr, "should be a sub-ilt here");
      return sub;
    }
    iltp = sub;
  }
  return iltp;
}

// Count number of nodes in this subtree
int InlineTree::count() const {
  int result = 1;
  for (int i = 0; i < _subtrees.length(); i++) {
    result += _subtrees.at(i)->count();
  }
  return result;
}

void InlineTree::dump_replay_data(outputStream* out, int depth_adjust) {
  out->print(" %d %d %d ", inline_level() + depth_adjust, caller_bci(), _late_inline);
  method()->dump_name_as_ascii(out);
  for (int i = 0; i < _subtrees.length(); i++) {
    _subtrees.at(i)->dump_replay_data(out, depth_adjust);
  }
}


#ifndef PRODUCT
void InlineTree::print_impl(outputStream* st, int indent) const {
  for (int i = 0; i < indent; i++) st->print(" ");
  st->print(" @ %d", caller_bci());
  method()->print_short_name(st);
  st->cr();

  for (int i = 0; i < _subtrees.length(); i++) {
    _subtrees.at(i)->print_impl(st, indent + 2);
  }
}

void InlineTree::print_value_on(outputStream* st) const {
  print_impl(st, 2);
}
#endif