/*
 * Copyright (c) 1998, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciReplay.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compilationPolicy.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compilerEvent.hpp"
#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "jfr/jfrEvents.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/callGenerator.hpp"
#include "opto/parse.hpp"
#include "runtime/handles.inline.hpp"
#include "utilities/events.hpp"

//=============================================================================
//------------------------------InlineTree-------------------------------------
InlineTree::InlineTree(Compile* c,
                       const InlineTree *caller_tree, ciMethod* callee,
                       JVMState* caller_jvms, int caller_bci,
                       int max_inline_level) :
  C(c),
  _caller_jvms(nullptr),
  _method(callee),
  _late_inline(false),
  _caller_tree((InlineTree*) caller_tree),
  _count_inline_bcs(method()->code_size_for_inlining()),
  _max_inline_level(max_inline_level),
  _subtrees(c->comp_arena(), 2, 0, nullptr),
  _msg(nullptr)
{
#ifndef PRODUCT
  _count_inlines = 0;
  _forced_inline = false;
#endif
  if (caller_jvms != nullptr) {
    // Keep a private copy of the caller_jvms:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
    assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  }
  assert((caller_tree == nullptr ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  // Update hierarchical counts, count_inline_bcs() and count_inlines()
  InlineTree* caller = (InlineTree*)caller_tree;
  for (; caller != nullptr; caller = (InlineTree*)caller->caller_tree()) {
    caller->_count_inline_bcs += count_inline_bcs();
    NOT_PRODUCT(caller->_count_inlines++;)
  }
}

/**
 *  Return true when EA is ON and a java constructor is called or
 *  a super constructor is called from an inlined java constructor.
 *  Also return true for boxing methods.
 *  Also return true for methods returning Iterator (including Iterable::iterator()),
 *  which is essential for the performance of for-each loops.
 */
static bool is_init_with_ea(ciMethod* callee_method,
                            ciMethod* caller_method, Compile* C) {
  if (!C->do_escape_analysis() || !EliminateAllocations) {
    return false; // EA is off
  }
  if (callee_method->is_object_initializer()) {
    return true; // constructor
  }
  if (caller_method->is_object_initializer() &&
      caller_method != C->method() &&
      caller_method->holder()->is_subclass_of(callee_method->holder())) {
    return true; // super constructor is called from inlined constructor
  }
  if (C->eliminate_boxing() && callee_method->is_boxing_method()) {
    return true;
  }
  ciType* retType = callee_method->signature()->return_type();
  ciKlass* iter = C->env()->Iterator_klass();
  if (retType->is_loaded() && iter->is_loaded() && retType->is_subtype_of(iter)) {
    return true;
  }
  return false;
}

/**
 *  Force inlining of unboxing accessors (e.g. Integer::intValue)
 *  when boxing elimination is enabled.
 */
static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
  return C->eliminate_boxing() && callee_method->is_unboxing_method();
}

// positive filter: should callee be inlined?
bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, bool& should_delay, ciCallProfile& profile) {
  // Allows targeted inlining
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    _forced_inline = true;
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    _forced_inline = true;
    return true;
  }

  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    _forced_inline = true;
    return true;
  }

  int size = callee_method->code_size_for_inlining();

  // Check for too many throws (and not too huge)
  if (callee_method->interpreter_throwout_count() > InlineThrowCount &&
      size < InlineThrowMaxSize) {
    if (C->print_inlining() && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    set_msg("many throws");
    return true;
  }

  int default_max_inline_size = C->max_inline_size();
  int inline_small_code_size  = InlineSmallCode / 4;
  int max_inline_size         = default_max_inline_size;

  int call_site_count  = caller_method->scale_count(profile.count());
  int invoke_count     = caller_method->interpreter_invocation_count();

  assert(invoke_count != 0, "require invocation count greater than zero");
  double freq = (double)call_site_count / (double)invoke_count;
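  // freq measures how often this call site runs per invocation of the caller.
  // For example (illustrative numbers): a site counted 50 times in a caller
  // invoked 100 times gives freq == 0.5, which is frequent for typical
  // InlineFrequencyRatio settings and bumps max_inline_size below.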

  // bump the max size if the call is frequent
  if ((freq >= InlineFrequencyRatio) ||
      is_unboxing_method(callee_method, C) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_inline_size = C->freq_inline_size();
    if (size <= max_inline_size && TraceFrequencyInlining) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined frequent method (freq=%lf):", freq);
      CompileTask::print_inline_indent(inline_level());
      callee_method->print();
      tty->cr();
    }
  } else {
    // Not hot.  Check for medium-sized pre-existing nmethod at cold sites.
    if (callee_method->has_compiled_code() &&
        callee_method->inline_instructions_size() > inline_small_code_size) {
      set_msg("already compiled into a medium method");
      return false;
    }
  }
  if (size > max_inline_size) {
    if (max_inline_size > default_max_inline_size) {
      set_msg("hot method too big");
    } else {
      set_msg("too big");
    }
    return false;
  }
  return true;
}


// negative filter: should callee NOT be inlined?
bool InlineTree::should_not_inline(ciMethod* callee_method, ciMethod* caller_method,
                                   int caller_bci, bool& should_delay, ciCallProfile& profile) {
  const char* fail_msg = nullptr;

  // First check all inlining restrictions which are required for correctness
  if (callee_method->is_abstract()) {
    fail_msg = "abstract method"; // note: we allow ik->is_abstract()
  } else if (!callee_method->holder()->is_initialized() &&
             // access allowed in the context of static initializer
             C->needs_clinit_barrier(callee_method->holder(), caller_method)) {
    fail_msg = "method holder not initialized";
  } else if (callee_method->is_native()) {
    fail_msg = "native method";
  } else if (callee_method->dont_inline()) {
    fail_msg = "don't inline by annotation";
  }

  // Don't inline a method that changes Thread.currentThread() except
  // into another method that is annotated @ChangesCurrentThread.
  if (callee_method->changes_current_thread()
      && !C->method()->changes_current_thread()) {
    fail_msg = "method changes current thread";
  }

  // one more inlining restriction
  if (fail_msg == nullptr && callee_method->has_unloaded_classes_in_signature()) {
    fail_msg = "unloaded signature classes";
  }

  if (fail_msg != nullptr) {
    set_msg(fail_msg);
    return true;
  }

  // ignore heuristic controls on inlining
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    return false;
  }

  if (C->directive()->should_not_inline(callee_method)) {
    set_msg("disallowed by CompileCommand");
    return true;
  }

  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    return false;
  }

  if (ciReplay::should_not_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (ciReplay::should_not_inline(callee_method)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    return false;
  }

  // Now perform checks which are heuristic

  if (is_unboxing_method(callee_method, C)) {
    // Inline unboxing methods.
    return false;
  }

  if (callee_method->has_compiled_code() &&
      callee_method->inline_instructions_size() > InlineSmallCode) {
    set_msg("already compiled into a big method");
    return true;
  }

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != nullptr &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree *top = this;
    while (top->caller_tree() != nullptr) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass())) {
      set_msg("exception method");
      return true;
    }
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size() <= MaxTrivialSize) {
    return false;
  }

  // don't use counts with -Xcomp
  if (UseInterpreter) {
    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      set_msg("never executed");
      return true;
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis: inline all executed constructors
      return false;
    }

    if (MinInlineFrequencyRatio > 0) {
      int call_site_count  = caller_method->scale_count(profile.count());
      int invoke_count     = caller_method->interpreter_invocation_count();
      assert(invoke_count != 0, "require invocation count greater than zero");
      double freq = (double)call_site_count / (double)invoke_count;
      // avoid division by 0, set divisor to at least 1
      int cp_min_inv = MAX2(1, CompilationPolicy::min_invocations());
      double min_freq = MAX2(MinInlineFrequencyRatio, 1.0 / cp_min_inv);
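      // Worked example (hypothetical numbers): with MinInlineFrequencyRatio
      // == 0.01 and CompilationPolicy::min_invocations() == 200, min_freq is
      // MAX2(0.01, 1.0/200 == 0.005) == 0.01, so a site executing on fewer
      // than 1% of caller invocations is rejected as too cold.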

      if (freq < min_freq) {
        set_msg("low call site frequency");
        return true;
      }
    }
  }

  return false;
}

bool InlineTree::is_not_reached(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile) {
  if (!UseInterpreter) {
    return false; // -Xcomp
  }
  if (profile.count() > 0) {
    return false; // reachable according to profile
  }
  if (!callee_method->was_executed_more_than(0)) {
    return true; // callee was never executed
  }
  if (caller_method->is_not_reached(caller_bci)) {
    return true; // call site not resolved
  }
  if (profile.count() == -1) {
    return false; // immature profile; optimistically treat as reached
  }
  assert(profile.count() == 0, "sanity");

  // Profile info is scarce.
  // Try to guess: check if the call site belongs to a start block.
  // Call sites in a start block should be reachable if no exception is thrown earlier.
  ciMethodBlocks* caller_blocks = caller_method->get_method_blocks();
  bool is_start_block = caller_blocks->block_containing(caller_bci)->start_bci() == 0;
  if (is_start_block) {
    return false; // treat the call as reached since it is part of the start block
  }
  return true; // give up and treat the call site as not reached
}

//-----------------------------try_to_inline-----------------------------------
// return true if ok
// Relocated from "InliningClosure::try_to_inline"
bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, JVMState* jvms, ciCallProfile& profile,
                               bool& should_delay) {

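  // count_inline_bcs() is the running bytecode budget for the whole inline
  // tree; once it crosses DesiredMethodLimit, only force-inlined callees may
  // proceed, and then only by being delayed to incremental (late) inlining.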
  if (ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  _forced_inline = false; // Reset

  // 'should_delay' can be overridden during replay compilation
  if (!should_inline(callee_method, caller_method, caller_bci, should_delay, profile)) {
    return false;
  }
  // 'should_delay' can be overridden during replay compilation
  if (should_not_inline(callee_method, caller_method, caller_bci, should_delay, profile)) {
    return false;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    set_msg("accessor");
    return true;
  }

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->over_inlining_cutoff()) {
      if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
          || !IncrementalInline) {
        set_msg("NodeCountInliningCutoff");
        return false;
      } else {
        should_delay = true;
      }
    }

    if (!UseInterpreter &&
        is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis stress testing when running Xcomp:
      // inline constructors even if they are not reached.
    } else if (forced_inline()) {
      // Inlining was forced by CompilerOracle, ciReplay or annotation
    } else if (is_not_reached(callee_method, caller_method, caller_bci, profile)) {
      // don't inline unreached call sites
      set_msg("call site not reached");
      return false;
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    set_msg("not an accessor");
    return false;
  }

  // Limit inlining depth in case inlining is forced or
  // _max_inline_level was increased to compensate for lambda forms.
  if (inline_level() > MaxForceInlineLevel) {
    set_msg("MaxForceInlineLevel");
    return false;
  }
  if (inline_level() > _max_inline_level) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("inlining too deep");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // detect direct and indirect recursive inlining
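  // Walk the JVMState chain and count frames that already execute
  // callee_method. For compiled lambda forms the receiver (argument 0) must
  // also match, since lambda forms are shared across unrelated call sites.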
  {
    // count the current method and the callee
    const bool is_compiled_lambda_form = callee_method->is_compiled_lambda_form();
    int inline_level = 0;
    if (!is_compiled_lambda_form) {
      if (method() == callee_method) {
        inline_level++;
      }
    }
    // count callers of current method and callee
    Node* callee_argument0 = is_compiled_lambda_form ? jvms->map()->argument(jvms, 0)->uncast() : nullptr;
    for (JVMState* j = jvms->caller(); j != nullptr && j->has_method(); j = j->caller()) {
      if (j->method() == callee_method) {
        if (is_compiled_lambda_form) {
          // Since compiled lambda forms are heavily reused we allow recursive inlining.
          // If it is truly a recursion (using the same receiver) we limit inlining;
          // otherwise we could easily blow the compiler stack.
          Node* caller_argument0 = j->map()->argument(j, 0)->uncast();
          if (caller_argument0 == callee_argument0) {
            inline_level++;
          }
        } else {
          inline_level++;
        }
      }
    }
    if (inline_level > MaxRecursiveInlineLevel) {
      set_msg("recursive inlining is too deep");
      return false;
    }
  }

  int size = callee_method->code_size_for_inlining();

  if (ClipInlining && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // ok, inline this method
  return true;
}

//------------------------------pass_initial_checks----------------------------
bool InlineTree::pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  // Check if a callee_method was suggested
  if (callee_method == nullptr) {
    return false;
  }
  ciInstanceKlass* callee_holder = callee_method->holder();
  // Check if klass of callee_method is loaded
  if (!callee_holder->is_loaded()) {
    return false;
  }
  if (!callee_holder->is_initialized() &&
      // access allowed in the context of static initializer
      C->needs_clinit_barrier(callee_holder, caller_method)) {
    return false;
  }
  if (!UseInterpreter) { // running Xcomp
    // Check that the constant pool's call site has been visited;
    // this is stricter than callee_holder->is_initialized().
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2();
      if (!caller_method->is_klass_loaded(index, call_bc, true)) {
        return false;
      }
      // Try to do constant pool resolution if running Xcomp
      if (!caller_method->check_call(index, call_bc == Bytecodes::_invokestatic)) {
        return false;
      }
    }
  }
  return true;
}

//------------------------------check_can_parse--------------------------------
const char* InlineTree::check_can_parse(ciMethod* callee) {
  // Certain methods cannot be parsed at all:
  if ( callee->is_native())                     return "native method";
  if ( callee->is_abstract())                   return "abstract method";
  if (!callee->has_balanced_monitors())         return "not compilable (unbalanced monitors)";
  if ( callee->get_flow_analysis()->failing())  return "not compilable (flow analysis failed)";
  if (!callee->can_be_parsed())                 return "cannot be parsed";
  return nullptr;
}

//------------------------------print_inlining---------------------------------
void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci,
                                ciMethod* caller_method, bool success) const {
  const char* inline_msg = msg();
  assert(inline_msg != nullptr, "just checking");
  if (C->log() != nullptr) {
    if (success) {
      C->log()->inline_success(inline_msg);
    } else {
      C->log()->inline_fail(inline_msg);
    }
  }
  CompileTask::print_inlining_ul(callee_method, inline_level(),
                                 caller_bci, inlining_result_of(success), inline_msg);
  if (C->print_inlining()) {
    C->print_inlining(callee_method, inline_level(), caller_bci, inlining_result_of(success), inline_msg);
    guarantee(callee_method != nullptr, "would crash in CompilerEvent::InlineEvent::post");
    if (Verbose) {
      const InlineTree *top = this;
      while (top->caller_tree() != nullptr) { top = top->caller_tree(); }
      //tty->print("  bcs: %d+%d  invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
    }
  }
  EventCompilerInlining event;
  if (event.should_commit()) {
    CompilerEvent::InlineEvent::post(event, C->compile_id(), caller_method->get_Method(), callee_method, success, inline_msg, caller_bci);
  }
}

//------------------------------ok_to_inline-----------------------------------
bool InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile,
                              bool& should_delay) {
#ifdef ASSERT
  assert(callee_method != nullptr, "caller checks for optimized virtual!");
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == nullptr) {
    assert(_caller_jvms == nullptr, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  int         caller_bci    = jvms->bci();
  ciMethod*   caller_method = jvms->method();

  // Do some initial checks.
  if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
    set_msg("failed initial checks");
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return false;
  }

  // Do some parse checks.
  set_msg(check_can_parse(callee_method));
  if (msg() != nullptr) {
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return false;
  }

  // Check if inlining policy says no.
  bool success = try_to_inline(callee_method, caller_method, caller_bci, jvms, profile,
                               should_delay); // out
  if (success) {
    // Inline!
    if (msg() == nullptr) {
      set_msg("inline (hot)");
    }
    print_inlining(callee_method, caller_bci, caller_method, true /* success */);
    InlineTree* callee_tree = build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (should_delay) {
      // Record late inlining decision in order to dump it for compiler replay
      callee_tree->set_late_inline();
    }
    return true;
  } else {
    // Do not inline
    if (msg() == nullptr) {
      set_msg("too cold to inline");
    }
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return false;
  }
}

//------------------------------build_inline_tree_for_callee-------------------
InlineTree* InlineTree::build_inline_tree_for_callee(ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != nullptr) {
    return old_ilt;
  }
  int max_inline_level_adjust = 0;
  if (caller_jvms->method() != nullptr) {
    if (caller_jvms->method()->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1;  // don't count actions in MH or indy adapter frames
    } else if (callee_method->is_method_handle_intrinsic() ||
               callee_method->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1;  // don't count method handle calls from java.lang.invoke implementation
    }
    if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr(" \\-> discounting inline depth");
    }
    if (max_inline_level_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  // Allocate in the comp_arena to make sure the InlineTree is live when dumping a replay compilation file
  InlineTree* ilt = new (C->comp_arena()) InlineTree(C, this, callee_method, caller_jvms, caller_bci, _max_inline_level + max_inline_level_adjust);
  _subtrees.append(ilt);

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}


//---------------------------------------callee_at-----------------------------
InlineTree* InlineTree::callee_at(int bci, ciMethod* callee) const {
  for (int i = 0; i < _subtrees.length(); i++) {
    InlineTree* sub = _subtrees.at(i);
    if (sub->caller_bci() == bci && callee == sub->method()) {
      return sub;
    }
  }
  return nullptr;
}


//------------------------------build_inline_tree_root-------------------------
InlineTree* InlineTree::build_inline_tree_root() {
  Compile* C = Compile::current();

  // Root of inline tree
  InlineTree* ilt = new InlineTree(C, nullptr, C->method(), nullptr, -1, MaxInlineLevel);

  return ilt;
}


//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
  InlineTree* iltp = root;
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp  = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (sub == nullptr) {
      if (d == depth) {
        sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      guarantee(sub != nullptr, "should be a sub-ilt here");
      return sub;
    }
    iltp = sub;
  }
  return iltp;
}

// Count number of nodes in this subtree
int InlineTree::count() const {
  int result = 1;
  for (int i = 0; i < _subtrees.length(); i++) {
    result += _subtrees.at(i)->count();
  }
  return result;
}

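// Emit one replay record per tree node: inline depth (adjusted by
// depth_adjust), caller bci, and the late-inline flag, followed by the
// method name; records for the subtrees follow recursively.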
void InlineTree::dump_replay_data(outputStream* out, int depth_adjust) {
  out->print(" %d %d %d ", inline_level() + depth_adjust, caller_bci(), _late_inline);
  method()->dump_name_as_ascii(out);
  for (int i = 0; i < _subtrees.length(); i++) {
    _subtrees.at(i)->dump_replay_data(out, depth_adjust);
  }
}


#ifndef PRODUCT
void InlineTree::print_impl(outputStream* st, int indent) const {
  for (int i = 0; i < indent; i++) st->print(" ");
  st->print(" @ %d", caller_bci());
  method()->print_short_name(st);
  st->cr();

  for (int i = 0; i < _subtrees.length(); i++) {
    _subtrees.at(i)->print_impl(st, indent + 2);
  }
}

void InlineTree::print_value_on(outputStream* st) const {
  print_impl(st, 2);
}
#endif