1 /*
2 * Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "ci/ciReplay.hpp"
26 #include "classfile/vmSymbols.hpp"
27 #include "compiler/compilationPolicy.hpp"
28 #include "compiler/compileBroker.hpp"
29 #include "compiler/compileLog.hpp"
30 #include "compiler/compilerEvent.hpp"
31 #include "interpreter/linkResolver.hpp"
32 #include "jfr/jfrEvents.hpp"
33 #include "oops/objArrayKlass.hpp"
34 #include "opto/callGenerator.hpp"
35 #include "opto/parse.hpp"
36 #include "runtime/handles.inline.hpp"
37 #include "utilities/events.hpp"
38
39 //=============================================================================
40 //------------------------------InlineTree-------------------------------------
// Build one node of the inline tree: 'callee' inlined into 'caller_tree'
// at call site 'caller_bci'. Keeps a private copy of the caller JVMState
// and propagates this node's bytecode count up to every ancestor node.
InlineTree::InlineTree(Compile* c,
                       const InlineTree *caller_tree, ciMethod* callee,
                       JVMState* caller_jvms, int caller_bci,
                       int max_inline_level) :
  C(c),
  _caller_jvms(nullptr),
  _method(callee),
  _late_inline(false),
  _caller_tree((InlineTree*) caller_tree),
  _count_inline_bcs(method()->code_size_for_inlining()),
  _max_inline_level(max_inline_level),
  _subtrees(c->comp_arena(), 2, 0, nullptr),
  _msg(nullptr)
{
#ifndef PRODUCT
  _count_inlines = 0;
  _forced_inline = false;
#endif
  if (caller_jvms != nullptr) {
    // Keep a private copy of the caller_jvms so the tree stays valid
    // independently of the parse-time JVMS it was built from:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    _caller_jvms->set_receiver_info(caller_jvms->receiver_info());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
    assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  }
  assert((caller_tree == nullptr ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  // Update hierarchical counts, count_inline_bcs() and count_inlines()
  InlineTree *caller = (InlineTree *)caller_tree;
  for( ; caller != nullptr; caller = ((InlineTree *)(caller->caller_tree())) ) {
    caller->_count_inline_bcs += count_inline_bcs();
    NOT_PRODUCT(caller->_count_inlines++;)
  }
}
76
77 /**
78 * Return true when EA is ON and a java constructor is called or
79 * a super constructor is called from an inlined java constructor.
80 * Also return true for boxing methods.
81 * Also return true for methods returning Iterator (including Iterable::iterator())
82 * that is essential for forall-loops performance.
83 */
84 static bool is_init_with_ea(ciMethod* callee_method,
85 ciMethod* caller_method, Compile* C) {
86 if (!C->do_escape_analysis() || !EliminateAllocations) {
87 return false; // EA is off
88 }
89 if (callee_method->is_object_initializer()) {
90 return true; // constructor
91 }
92 if (caller_method->is_object_initializer() &&
93 caller_method != C->method() &&
94 caller_method->holder()->is_subclass_of(callee_method->holder())) {
95 return true; // super constructor is called from inlined constructor
96 }
97 if (C->eliminate_boxing() && callee_method->is_boxing_method()) {
98 return true;
99 }
100 ciType *retType = callee_method->signature()->return_type();
101 ciKlass *iter = C->env()->Iterator_klass();
102 if(retType->is_loaded() && iter->is_loaded() && retType->is_subtype_of(iter)) {
103 return true;
104 }
105 return false;
106 }
107
108 /**
109 * Force inlining unboxing accessor.
110 */
111 static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
112 return C->eliminate_boxing() && callee_method->is_unboxing_method();
113 }
114
115 // positive filter: should callee be inlined?
// Positive inlining filter: returns true when heuristics say 'callee_method'
// should be inlined at this call site; sets _msg with the reason. 'should_delay'
// may be set (via ciReplay) to request incremental (late) inlining.
bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
                               JVMState* caller_jvms, bool& should_delay, ciCallProfile& profile) {
  int caller_bci = caller_jvms->bci();
  // Allows targeted inlining
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    _forced_inline = true;
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    _forced_inline = true;
    return true;
  }

  // Replay compilation can force the same inlining decisions as recorded.
  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    _forced_inline = true;
    return true;
  }

  int size = callee_method->code_size_for_inlining();

  // Check for too many throws (and not too huge)
  if(callee_method->interpreter_throwout_count() > InlineThrowCount &&
     size < InlineThrowMaxSize ) {
    if (Verbose) {
      outputStream* stream = C->inline_printer()->record(callee_method, caller_jvms, InliningResult::SUCCESS);
      stream->print("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    set_msg("many throws");
    return true;
  }

  int default_max_inline_size = C->max_inline_size();
  int inline_small_code_size = InlineSmallCode / 4;
  int max_inline_size = default_max_inline_size;

  // Estimated call site frequency: calls to this site per caller invocation.
  int call_site_count = caller_method->scale_count(profile.count());
  int invoke_count = caller_method->interpreter_invocation_count();

  assert(invoke_count != 0, "require invocation count greater than zero");
  double freq = (double)call_site_count / (double)invoke_count;

  // bump the max size if the call is frequent
  if ((freq >= InlineFrequencyRatio) ||
      is_unboxing_method(callee_method, C) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_inline_size = C->freq_inline_size();
    if (size <= max_inline_size && TraceFrequencyInlining) {
      outputStream* stream = C->inline_printer()->record(callee_method, caller_jvms, InliningResult::SUCCESS);
      stream->print("Inlined frequent method (freq=%lf):", freq);
    }
  } else {
    // Not hot. Check for medium-sized pre-existing nmethod at cold sites.
    if (callee_method->has_compiled_code() &&
        callee_method->inline_instructions_size() > inline_small_code_size) {
      set_msg("already compiled into a medium method");
      return false;
    }
  }
  if (size > max_inline_size) {
    if (max_inline_size > default_max_inline_size) {
      set_msg("hot method too big");
    } else {
      set_msg("too big");
    }
    return false;
  }
  return true;
}
194
195
// negative filter: should callee NOT be inlined?
// Returns true (and sets _msg) when inlining must or should be rejected.
// Correctness restrictions are checked first, then CompileCommand/ciReplay
// overrides, and finally the heuristic rejections.
bool InlineTree::should_not_inline(ciMethod* callee_method, ciMethod* caller_method,
                                   int caller_bci, bool& should_delay, ciCallProfile& profile) {
  const char* fail_msg = nullptr;

  // First check all inlining restrictions which are required for correctness
  if (callee_method->is_abstract()) {
    fail_msg = "abstract method"; // note: we allow ik->is_abstract()
  } else if (!callee_method->holder()->is_initialized() &&
             // access allowed in the context of static initializer
             C->needs_clinit_barrier(callee_method->holder(), caller_method)) {
    fail_msg = "method holder not initialized";
  } else if (callee_method->is_native()) {
    fail_msg = "native method";
  } else if (callee_method->dont_inline()) {
    fail_msg = "don't inline by annotation";
  }

  // Don't inline a method that changes Thread.currentThread() except
  // into another method that is annotated @ChangesCurrentThread.
  if (callee_method->changes_current_thread()
      && ! C->method()->changes_current_thread()) {
    fail_msg = "method changes current thread";
  }

  // one more inlining restriction
  if (fail_msg == nullptr && callee_method->has_unloaded_classes_in_signature()) {
    fail_msg = "unloaded signature classes";
  }

  if (fail_msg != nullptr) {
    set_msg(fail_msg);
    return true;
  }

  // ignore heuristic controls on inlining
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    return false;
  }

  if (C->directive()->should_not_inline(callee_method)) {
    set_msg("disallowed by CompileCommand");
    return true;
  }

  // Replay compilation may force or forbid this call site explicitly.
  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    return false;
  }

  if (ciReplay::should_not_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (ciReplay::should_not_inline(callee_method)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    return false;
  }

  // Now perform checks which are heuristic

  if (is_unboxing_method(callee_method, C)) {
    // Inline unboxing methods.
    return false;
  }

  // An existing big nmethod suggests the callee is too large to be worth inlining.
  if (callee_method->has_compiled_code() &&
      callee_method->inline_instructions_size() > InlineSmallCode) {
    set_msg("already compiled into a big method");
    return true;
  }

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != nullptr &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree *top = this;
    while (top->caller_tree() != nullptr) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass())) {
      set_msg("exception method");
      return true;
    }
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size() <= MaxTrivialSize) {
    return false;
  }

  // don't use counts with -Xcomp
  if (UseInterpreter) {
    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      set_msg("never executed");
      return true;
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis: inline all executed constructors
      return false;
    }

    // Reject call sites whose observed frequency (calls per caller
    // invocation) falls below the configured minimum ratio.
    if (MinInlineFrequencyRatio > 0) {
      int call_site_count = caller_method->scale_count(profile.count());
      int invoke_count = caller_method->interpreter_invocation_count();
      assert(invoke_count != 0, "require invocation count greater than zero");
      double freq = (double)call_site_count / (double)invoke_count;
      // avoid division by 0, set divisor to at least 1
      int cp_min_inv = MAX2(1, CompilationPolicy::min_invocations());
      double min_freq = MAX2(MinInlineFrequencyRatio, 1.0 / cp_min_inv);

      if (freq < min_freq) {
        set_msg("low call site frequency");
        return true;
      }
    }
  }

  return false;
}
329
// Conservatively decide whether the call site at 'caller_bci' was ever
// reached during interpretation, to avoid inlining dead call sites.
// Checks are ordered from strongest evidence to weakest guess.
bool InlineTree::is_not_reached(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile) {
  if (!UseInterpreter) {
    return false; // -Xcomp
  }
  if (profile.count() > 0) {
    return false; // reachable according to profile
  }
  if (!callee_method->was_executed_more_than(0)) {
    return true; // callee was never executed
  }
  if (caller_method->is_not_reached(caller_bci)) {
    return true; // call site not resolved
  }
  if (profile.count() == -1) {
    return false; // immature profile; optimistically treat as reached
  }
  assert(profile.count() == 0, "sanity");

  // Profile info is scarce.
  // Try to guess: check if the call site belongs to a start block.
  // Call sites in a start block should be reachable if no exception is thrown earlier.
  ciMethodBlocks* caller_blocks = caller_method->get_method_blocks();
  bool is_start_block = caller_blocks->block_containing(caller_bci)->start_bci() == 0;
  if (is_start_block) {
    return false; // treat the call reached as part of start block
  }
  return true; // give up and treat the call site as not reached
}
358
//-----------------------------try_to_inline-----------------------------------
// return true if ok
// Relocated from "InliningClosure::try_to_inline"
// Combines the positive (should_inline) and negative (should_not_inline)
// filters with overall size, depth and recursion limits. May set
// 'should_delay' to postpone the decision to incremental (late) inlining.
bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, JVMState* jvms, ciCallProfile& profile,
                               bool& should_delay) {

  // Bail out early if the method being compiled has already grown too big.
  if (ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  _forced_inline = false; // Reset

  // 'should_delay' can be overridden during replay compilation
  if (!should_inline(callee_method, caller_method, jvms, should_delay, profile)) {
    return false;
  }
  // 'should_delay' can be overridden during replay compilation
  if (should_not_inline(callee_method, caller_method, caller_bci, should_delay, profile)) {
    return false;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    set_msg("accessor");
    return true;
  }

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->over_inlining_cutoff()) {
      if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
          || !IncrementalInline) {
        set_msg("NodeCountInliningCutoff");
        return false;
      } else {
        should_delay = true;
      }
    }

    if (!UseInterpreter &&
        is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis stress testing when running Xcomp:
      // inline constructors even if they are not reached.
    } else if (forced_inline()) {
      // Inlining was forced by CompilerOracle, ciReplay or annotation
    } else if (is_not_reached(callee_method, caller_method, caller_bci, profile)) {
      // don't inline unreached call sites
      set_msg("call site not reached");
      return false;
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    set_msg("not an accessor");
    return false;
  }

  // Limit inlining depth in case inlining is forced or
  // _max_inline_level was increased to compensate for lambda forms.
  if (inline_level() > MaxForceInlineLevel) {
    set_msg("MaxForceInlineLevel");
    return false;
  }
  if (inline_level() > _max_inline_level) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("inlining too deep");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // detect direct and indirect recursive inlining
  {
    const bool is_compiled_lambda_form = callee_method->is_compiled_lambda_form();
    const bool is_method_handle_invoker = is_compiled_lambda_form && !jvms->method()->is_compiled_lambda_form();

    ciInstance* lform_callee_recv = nullptr;
    if (is_compiled_lambda_form && !is_method_handle_invoker) { // MH invokers don't have a receiver
      lform_callee_recv = jvms->compute_receiver_info(callee_method);
    }

    // Walk up the caller chain counting how many times the callee already
    // appears; for lambda forms only frames with the same receiver count.
    int inline_level = 0;
    for (JVMState* j = jvms; j != nullptr && j->has_method(); j = j->caller()) {
      if (j->method() == callee_method) {
        // Since compiled lambda forms are heavily reused we allow recursive inlining. If it is truly
        // a recursion (using the same "receiver") we limit inlining otherwise we can easily blow the
        // compiler stack.
        if (lform_callee_recv != nullptr) {
          ciInstance* lform_caller_recv = j->receiver_info();
          assert(lform_caller_recv != nullptr || j->depth() == 1 ||
                 !j->caller()->method()->is_compiled_lambda_form(), // MH invoker
                 "missing receiver info");
          if (lform_caller_recv == lform_callee_recv || lform_caller_recv == nullptr) {
            inline_level++;
          }
        } else {
          inline_level++;
        }
      }
    }
    if (inline_level > MaxRecursiveInlineLevel) {
      set_msg("recursive inlining is too deep");
      return false;
    }
  }

  int size = callee_method->code_size_for_inlining();

  // Re-check the size limit including the bytecodes this inlining would add.
  if (ClipInlining && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // ok, inline this method
  return true;
}
488
//------------------------------pass_initial_checks----------------------------
// Basic sanity checks which must pass before any inlining decision is made:
// callee present, holder loaded and (when required) initialized, and under
// -Xcomp the call site's constant pool entries loaded and resolvable.
bool InlineTree::pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  // Check if a callee_method was suggested
  if (callee_method == nullptr) {
    return false;
  }
  ciInstanceKlass *callee_holder = callee_method->holder();
  // Check if klass of callee_method is loaded
  if (!callee_holder->is_loaded()) {
    return false;
  }
  if (!callee_holder->is_initialized() &&
      // access allowed in the context of static initializer
      C->needs_clinit_barrier(callee_holder, caller_method)) {
    return false;
  }
  if( !UseInterpreter ) /* running Xcomp */ {
    // Checks that constant pool's call site has been visited
    // stricter than callee_holder->is_initialized()
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2();
      if (!caller_method->is_klass_loaded(index, call_bc, true)) {
        return false;
      }
      // Try to do constant pool resolution if running Xcomp
      if( !caller_method->check_call(index, call_bc == Bytecodes::_invokestatic) ) {
        return false;
      }
    }
  }
  return true;
}
525
526 //------------------------------check_can_parse--------------------------------
527 const char* InlineTree::check_can_parse(ciMethod* callee) {
528 // Certain methods cannot be parsed at all:
529 if ( callee->is_native()) return "native method";
530 if ( callee->is_abstract()) return "abstract method";
531 if (!callee->has_balanced_monitors()) return "not compilable (unbalanced monitors)";
532 if ( callee->get_flow_analysis()->failing()) return "not compilable (flow analysis failed)";
533 if (!callee->can_be_parsed()) return "cannot be parsed";
534 return nullptr;
535 }
536
//------------------------------print_inlining---------------------------------
// Report an inlining decision (success or failure) to every consumer:
// the compile log, unified logging, the inline printer and JFR.
void InlineTree::print_inlining(ciMethod* callee_method, JVMState* jvm, bool success) const {
  int caller_bci = jvm->bci();
  ciMethod* caller_method = jvm->method();
  const char* inline_msg = msg();
  assert(inline_msg != nullptr, "just checking");
  if (C->log() != nullptr) {
    if (success) {
      C->log()->inline_success(inline_msg);
    } else {
      C->log()->inline_fail(inline_msg);
    }
  }
  CompileTask::print_inlining_ul(callee_method, inline_level(), caller_bci, inlining_result_of(success), inline_msg);
  C->inline_printer()->record(callee_method, jvm, inlining_result_of(success), inline_msg);
  EventCompilerInlining event;
  if (event.should_commit()) {
    guarantee(callee_method != nullptr, "would crash in CompilerEvent::InlineEvent::post");
    CompilerEvent::InlineEvent::post(event, C->compile_id(), caller_method->get_Method(), callee_method, success, inline_msg, caller_bci);
  }
}
558
//------------------------------ok_to_inline-----------------------------------
// Main entry point for the inlining policy: run the initial and parse checks,
// then the full heuristic filter. On success, records the decision by
// building a subtree for the callee; always reports the outcome.
bool InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile,
                              bool& should_delay) {
#ifdef ASSERT
  assert(callee_method != nullptr, "caller checks for optimized virtual!");
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == nullptr) {
    assert(_caller_jvms == nullptr, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  int caller_bci = jvms->bci();
  ciMethod* caller_method = jvms->method();

  // Do some initial checks.
  if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
    set_msg("failed initial checks");
    print_inlining(callee_method, jvms, false /* !success */);
    return false;
  }

  // Do some parse checks.
  set_msg(check_can_parse(callee_method));
  if (msg() != nullptr) {
    print_inlining(callee_method, jvms, false /* !success */);
    return false;
  }

  // Check if inlining policy says no.
  bool success = try_to_inline(callee_method, caller_method, caller_bci, jvms, profile,
                               should_delay); // out
  if (success) {
    // Inline!
    if (msg() == nullptr) {
      set_msg("inline (hot)");
    }
    print_inlining(callee_method, jvms, true /* success */);
    InlineTree* callee_tree = build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (should_delay) {
      // Record late inlining decision in order to dump it for compiler replay
      callee_tree->set_late_inline();
    }
    return true;
  } else {
    // Do not inline
    if (msg() == nullptr) {
      set_msg("too cold to inline");
    }
    print_inlining(callee_method, jvms, false /* !success */);
    return false;
  }
}
614
//------------------------------build_inline_tree_for_callee-------------------
// Find or create the subtree for 'callee_method' inlined at 'caller_bci'.
// Method-handle / lambda-form frames get a depth discount so the adapter
// machinery does not consume the regular inlining budget.
InlineTree *InlineTree::build_inline_tree_for_callee( ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != nullptr) {
    return old_ilt;
  }
  int max_inline_level_adjust = 0;
  if (caller_jvms->method() != nullptr) {
    if (caller_jvms->method()->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1; // don't count actions in MH or indy adapter frames
    } else if (callee_method->is_method_handle_intrinsic() ||
               callee_method->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1; // don't count method handle calls from java.lang.invoke implementation
    }
    if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) {
      C->inline_printer()->record(callee_method, caller_jvms, InliningResult::SUCCESS, " \\-> discounting inline depth");
    }
    if (max_inline_level_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  // Allocate in the comp_arena to make sure the InlineTree is live when dumping a replay compilation file
  InlineTree* ilt = new (C->comp_arena()) InlineTree(C, this, callee_method, caller_jvms, caller_bci, _max_inline_level + max_inline_level_adjust);
  _subtrees.append(ilt);

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}
647
648
649 //---------------------------------------callee_at-----------------------------
650 InlineTree *InlineTree::callee_at(int bci, ciMethod* callee) const {
651 for (int i = 0; i < _subtrees.length(); i++) {
652 InlineTree* sub = _subtrees.at(i);
653 if (sub->caller_bci() == bci && callee == sub->method()) {
654 return sub;
655 }
656 }
657 return nullptr;
658 }
659
660
661 //------------------------------build_inline_tree_root-------------------------
662 InlineTree *InlineTree::build_inline_tree_root() {
663 Compile* C = Compile::current();
664
665 // Root of inline tree
666 InlineTree* ilt = new InlineTree(C, nullptr, C->method(), nullptr, -1, MaxInlineLevel);
667
668 return ilt;
669 }
670
671
//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
  InlineTree* iltp = root;
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  // Walk the JVMS chain from the root down, matching each frame to a subtree.
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    // At the innermost frame the callee is the method being inlined now.
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (sub == nullptr) {
      if (d == depth) {
        // Missing only at the leaf: build the subtree on demand.
        sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      guarantee(sub != nullptr, "should be a sub-ilt here");
      return sub;
    }
    iltp = sub;
  }
  return iltp;
}
696
697 // Count number of nodes in this subtree
698 int InlineTree::count() const {
699 int result = 1;
700 for (int i = 0 ; i < _subtrees.length(); i++) {
701 result += _subtrees.at(i)->count();
702 }
703 return result;
704 }
705
// Emit this subtree in pre-order in the format the replay file expects:
// inline depth (adjusted), caller bci, late-inline flag, then the method name.
void InlineTree::dump_replay_data(outputStream* out, int depth_adjust) {
  out->print(" %d %d %d ", inline_level() + depth_adjust, caller_bci(), _late_inline);
  method()->dump_name_as_ascii(out);
  for (int i = 0 ; i < _subtrees.length(); i++) {
    _subtrees.at(i)->dump_replay_data(out, depth_adjust);
  }
}
713
714
715 #ifndef PRODUCT
716 void InlineTree::print_impl(outputStream* st, int indent) const {
717 for (int i = 0; i < indent; i++) st->print(" ");
718 st->print(" @ %d", caller_bci());
719 method()->print_short_name(st);
720 st->cr();
721
722 for (int i = 0 ; i < _subtrees.length(); i++) {
723 _subtrees.at(i)->print_impl(st, indent + 2);
724 }
725 }
726
// Print the whole inline tree with a default base indentation of two columns.
void InlineTree::print_value_on(outputStream* st) const {
  print_impl(st, 2);
}
730 #endif