src/hotspot/share/opto/doCall.cpp

   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "ci/ciCallSite.hpp"
  26 #include "ci/ciMethodHandle.hpp"
  27 #include "ci/ciSymbols.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "compiler/compileBroker.hpp"
  30 #include "compiler/compileLog.hpp"
  31 #include "interpreter/linkResolver.hpp"
  32 #include "logging/log.hpp"
  33 #include "logging/logLevel.hpp"
  34 #include "logging/logMessage.hpp"
  35 #include "logging/logStream.hpp"
  36 #include "opto/addnode.hpp"
  37 #include "opto/callGenerator.hpp"
  38 #include "opto/castnode.hpp"
  39 #include "opto/cfgnode.hpp"
  40 #include "opto/mulnode.hpp"
  41 #include "opto/parse.hpp"
  42 #include "opto/rootnode.hpp"
  43 #include "opto/runtime.hpp"
  44 #include "opto/subnode.hpp"
  45 #include "prims/methodHandles.hpp"
  46 #include "runtime/sharedRuntime.hpp"
  47 #include "utilities/macros.hpp"
  48 #if INCLUDE_JFR
  49 #include "jfr/jfr.hpp"
  50 #endif
  51 
  52 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
  53                                      bool with_deco) {
  54   if (with_deco) {
  55     CompileTask::print_inline_indent(depth, out);
  56   }
  57   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  58   prof_klass->name()->print_symbol_on(out);
  59   if (with_deco) {

  69     if (!C->print_inlining()) {
  70       if (!PrintOpto && !PrintCompilation) {
  71         method->print_short_name();
  72         tty->cr();
  73       }
  74       CompileTask::print_inlining_tty(prof_method, depth, bci, InliningResult::SUCCESS);
  75       print_trace_type_profile(tty, depth, prof_klass, site_count, receiver_count, true);
  76     } else {
  77       auto stream = C->inline_printer()->record(method, jvms, InliningResult::SUCCESS);
  78       print_trace_type_profile(stream, depth, prof_klass, site_count, receiver_count, false);
  79     }
  80   }
  81 
  82   LogTarget(Debug, jit, inlining) lt;
  83   if (lt.is_enabled()) {
  84     LogStream ls(lt);
  85     print_trace_type_profile(&ls, depth, prof_klass, site_count, receiver_count, true);
  86   }
  87 }
  88 
  89 CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool call_does_dispatch,
  90                                        JVMState* jvms, bool allow_inline,
  91                                        float prof_factor, ciKlass* speculative_receiver_type,
  92                                        bool allow_intrinsics) {
  93   assert(callee != nullptr, "failed method resolution");
  94 
  95   ciMethod*       caller      = jvms->method();
  96   int             bci         = jvms->bci();
  97   Bytecodes::Code bytecode    = caller->java_code_at_bci(bci);
  98   ciMethod*       orig_callee = caller->get_method_at_bci(bci);
  99 
 100   const bool is_virtual_or_interface = (bytecode == Bytecodes::_invokevirtual) ||
 101                                        (bytecode == Bytecodes::_invokeinterface) ||
 102                                        (orig_callee->intrinsic_id() == vmIntrinsics::_linkToVirtual) ||
 103                                        (orig_callee->intrinsic_id() == vmIntrinsics::_linkToInterface);
 104 
 105   const bool check_access = !orig_callee->is_method_handle_intrinsic(); // method handle intrinsics don't perform access checks
 106 
 107   // Dtrace currently doesn't work unless all calls are vanilla
 108   if (env()->dtrace_method_probes()) {

 130     log->begin_elem("call method='%d' count='%d' prof_factor='%f'",
 131                     log->identify(callee), site_count, prof_factor);
 132     if (call_does_dispatch)  log->print(" virtual='1'");
 133     if (allow_inline)     log->print(" inline='1'");
 134     if (receiver_count >= 0) {
 135       log->print(" receiver='%d' receiver_count='%d'", rid, receiver_count);
 136       if (profile.has_receiver(1)) {
 137         log->print(" receiver2='%d' receiver2_count='%d'", r2id, profile.receiver_count(1));
 138       }
 139     }
 140     if (callee->is_method_handle_intrinsic()) {
 141       log->print(" method_handle_intrinsic='1'");
 142     }
 143     log->end_elem();
 144   }
 145 
 146   // Special case the handling of certain common, profitable library
 147   // methods.  If these methods are replaced with specialized code,
 148   // then we return it as the inlined version of the call.
 149   CallGenerator* cg_intrinsic = nullptr;
 150   if (allow_inline && allow_intrinsics) {
 151     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 152     if (cg != nullptr) {
 153       if (cg->is_predicated()) {
 154         // Code without intrinsic but, hopefully, inlined.
 155         CallGenerator* inline_cg = this->call_generator(callee,
 156               vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, speculative_receiver_type, false);
 157         if (inline_cg != nullptr) {
 158           cg = CallGenerator::for_predicated_intrinsic(cg, inline_cg);
 159         }
 160       }
 161 
 162       // If intrinsic does the virtual dispatch, we try to use the type profile
 163       // first, and hopefully inline it as the regular virtual call below.
 164       // We will retry the intrinsic if nothing had claimed it afterwards.
 165       if (cg->does_virtual_dispatch()) {
 166         cg_intrinsic = cg;
 167         cg = nullptr;
 168       } else if (IncrementalInline && should_delay_vector_inlining(callee, jvms)) {
 169         return CallGenerator::for_late_inline(callee, cg);
 170       } else {

 590   // Speculative type of the receiver if any
 591   ciKlass* speculative_receiver_type = nullptr;
 592   if (is_virtual_or_interface) {
 593     Node* receiver_node             = stack(sp() - nargs);
 594     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 595     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 596     // For arrays, klass below is Object. When vtable calls are used,
 597     // resolving the call with Object would allow an illegal call to
 598     // finalize() on an array. We use holder instead: illegal calls to
 599     // finalize() won't be compiled as vtable calls (IC call
 600     // resolution will catch the illegal call) and the few legal calls
 601     // on array types won't be either.
 602     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 603                                       receiver_type, is_virtual,
 604                                       call_does_dispatch, vtable_index);  // out-parameters
 605     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 606   }
 607 
 608   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 609   ciKlass* receiver_constraint = nullptr;
 610   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_initializer()) {
 611     ciInstanceKlass* calling_klass = method()->holder();
 612     ciInstanceKlass* sender_klass = calling_klass;
 613     if (sender_klass->is_interface()) {
 614       receiver_constraint = sender_klass;
 615     }
 616   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 617     assert(holder->is_interface(), "How did we get a non-interface method here!");
 618     receiver_constraint = holder;
 619   }
 620 
 621   if (receiver_constraint != nullptr) {
 622     Node* receiver_node = stack(sp() - nargs);
 623     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 624     Node* bad_type_ctrl = nullptr;
 625     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 626     if (bad_type_ctrl != nullptr) {
 627       PreserveJVMState pjvms(this);
 628       set_control(bad_type_ctrl);
 629       uncommon_trap(Deoptimization::Reason_class_check,
 630                     Deoptimization::Action_none);
 631     }
 632     if (stopped()) {
 633       return; // MUST uncommon-trap?
 634     }
 635     set_stack(sp() - nargs, casted_receiver);
 636   }
 637 
 638   // Note:  It's OK to try to inline a virtual call.
 639   // The call generator will not attempt to inline a polymorphic call
 640   // unless it knows how to optimize the receiver dispatch.
 641   bool try_inline = (C->do_inlining() || InlineAccessors);
 642 
 643   // ---------------------
 644   dec_sp(nargs);              // Temporarily pop args for JVM state of call
 645   JVMState* jvms = sync_jvms();
 646 
 647   // ---------------------
 648   // Decide call tactic.
 649   // This call checks with CHA, the interpreter profile, intrinsics table, etc.
 650   // It decides whether inlining is desirable or not.
 651   CallGenerator* cg = C->call_generator(callee, vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type);
 652 
 653   // NOTE:  Don't use orig_callee and callee after this point!  Use cg->method() instead.
 654   orig_callee = callee = nullptr;
 655 
 656   // ---------------------
 657 
 658   // Feed profiling data for arguments to the type system so it can
 659   // propagate it as speculative types
 660   record_profiled_arguments_for_speculation(cg->method(), bc());
 661 
 662 #ifndef PRODUCT
 663   // bump global counters for calls
 664   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 665 
 666   // Record first part of parsing work for this call
 667   parse_histogram()->record_change();
 668 #endif // not PRODUCT
 669 
 670   assert(jvms == this->jvms(), "still operating on the right JVMS");
 671   assert(jvms_in_sync(),       "jvms must carry full info into CG");

 718 
 719   assert(check_call_consistency(jvms, cg), "inconsistent info");
 720 
 721   if (!stopped()) {
 722     // This was some sort of virtual call, which did a null check for us.
 723     // Now we can assert receiver-not-null, on the normal return path.
 724     if (receiver != nullptr && cg->is_virtual()) {
 725       Node* cast = cast_not_null(receiver);
 726       // %%% assert(receiver == cast, "should already have cast the receiver");
 727     }
 728 
 729     ciType* rtype = cg->method()->return_type();
 730     ciType* ctype = declared_signature->return_type();
 731 
 732     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 733       // Be careful here with return types.
 734       if (ctype != rtype) {
 735         BasicType rt = rtype->basic_type();
 736         BasicType ct = ctype->basic_type();
 737         if (ct == T_VOID) {
 738           // It's OK for a method to return a value that is discarded.
 739           // The discarding does not require any special action from the caller.
 740           // The Java code knows this, at VerifyType.isNullConversion.
 741           pop_node(rt);  // whatever it was, pop it
 742         } else if (rt == T_INT || is_subword_type(rt)) {
 743           // Nothing.  These cases are handled in lambda form bytecode.
 744           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 745         } else if (is_reference_type(rt)) {
 746           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 747           if (ctype->is_loaded()) {
 748             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 749             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 750             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 751               Node* retnode = pop();
 752               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 753               push(cast_obj);
 754             }
 755           }
 756         } else {
 757           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 758           // push a zero; it's better than getting an oop/int mismatch

 776     // If the return type of the method is not loaded, assert that the
 777     // value we got is a null.  Otherwise, we need to recompile.
 778     if (!rtype->is_loaded()) {
 779       if (PrintOpto && (Verbose || WizardMode)) {
 780         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 781         cg->method()->print_name(); tty->cr();
 782       }
 783       if (C->log() != nullptr) {
 784         C->log()->elem("assert_null reason='return' klass='%d'",
 785                        C->log()->identify(rtype));
 786       }
 787       // If there is going to be a trap, put it at the next bytecode:
 788       set_bci(iter().next_bci());
 789       null_assert(peek());
 790       set_bci(iter().cur_bci()); // put it back
 791     }
 792     BasicType ct = ctype->basic_type();
 793     if (is_reference_type(ct)) {
 794       record_profiled_return_for_speculation();
 795     }
 796   }
 797 
 798   // Restart record of parsing work after possible inlining of call
 799 #ifndef PRODUCT
 800   parse_histogram()->set_initial_state(bc());
 801 #endif
 802 }
 803 
 804 //---------------------------catch_call_exceptions-----------------------------
 805 // Put a Catch and CatchProj nodes behind a just-created call.
 806 // Send their caught exceptions to the proper handler.
 807 // This may be used after a call to the rethrow VM stub,
 808 // when it is needed to process unloaded exception classes.
 809 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 810   // Exceptions are delivered through this channel:
 811   Node* i_o = this->i_o();
 812 
 813   // Add a CatchNode.
 814   Arena tmp_mem{mtCompiler};
 815   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);

src/hotspot/share/opto/doCall.cpp (new version)

   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "ci/ciCallSite.hpp"
  26 #include "ci/ciMethodHandle.hpp"
  27 #include "ci/ciSymbols.hpp"
  28 #include "classfile/vmIntrinsics.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "interpreter/linkResolver.hpp"
  33 #include "jvm_io.h"
  34 #include "logging/log.hpp"
  35 #include "logging/logLevel.hpp"
  36 #include "logging/logMessage.hpp"
  37 #include "logging/logStream.hpp"
  38 #include "opto/addnode.hpp"
  39 #include "opto/callGenerator.hpp"
  40 #include "opto/castnode.hpp"
  41 #include "opto/cfgnode.hpp"
  42 #include "opto/inlinetypenode.hpp"
  43 #include "opto/mulnode.hpp"
  44 #include "opto/parse.hpp"
  45 #include "opto/rootnode.hpp"
  46 #include "opto/runtime.hpp"
  47 #include "opto/subnode.hpp"
  48 #include "prims/methodHandles.hpp"
  49 #include "runtime/sharedRuntime.hpp"
  50 #include "utilities/macros.hpp"
  51 #if INCLUDE_JFR
  52 #include "jfr/jfr.hpp"
  53 #endif
  54 
  55 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
  56                                      bool with_deco) {
  57   if (with_deco) {
  58     CompileTask::print_inline_indent(depth, out);
  59   }
  60   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  61   prof_klass->name()->print_symbol_on(out);
  62   if (with_deco) {

  72     if (!C->print_inlining()) {
  73       if (!PrintOpto && !PrintCompilation) {
  74         method->print_short_name();
  75         tty->cr();
  76       }
  77       CompileTask::print_inlining_tty(prof_method, depth, bci, InliningResult::SUCCESS);
  78       print_trace_type_profile(tty, depth, prof_klass, site_count, receiver_count, true);
  79     } else {
  80       auto stream = C->inline_printer()->record(method, jvms, InliningResult::SUCCESS);
  81       print_trace_type_profile(stream, depth, prof_klass, site_count, receiver_count, false);
  82     }
  83   }
  84 
  85   LogTarget(Debug, jit, inlining) lt;
  86   if (lt.is_enabled()) {
  87     LogStream ls(lt);
  88     print_trace_type_profile(&ls, depth, prof_klass, site_count, receiver_count, true);
  89   }
  90 }
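
Reviewer note: the LogTarget(Debug, jit, inlining) sink above duplicates the trace line through unified logging; assuming the tag set stays as written, -Xlog:jit+inlining=debug should be the switch that enables it.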
  91 
  92 static bool arg_can_be_larval(ciMethod* callee, int arg_idx) {
  93   if (callee->is_object_constructor() && arg_idx == 0) {
  94     return true;
  95   }
  96 
  97   if (arg_idx != 1 || callee->intrinsic_id() == vmIntrinsicID::_none) {
  98     return false;
  99   }
 100 
 101   switch (callee->intrinsic_id()) {
 102     case vmIntrinsicID::_finishPrivateBuffer:
 103     case vmIntrinsicID::_putBoolean:
 104     case vmIntrinsicID::_putBooleanOpaque:
 105     case vmIntrinsicID::_putBooleanRelease:
 106     case vmIntrinsicID::_putBooleanVolatile:
 107     case vmIntrinsicID::_putByte:
 108     case vmIntrinsicID::_putByteOpaque:
 109     case vmIntrinsicID::_putByteRelease:
 110     case vmIntrinsicID::_putByteVolatile:
 111     case vmIntrinsicID::_putChar:
 112     case vmIntrinsicID::_putCharOpaque:
 113     case vmIntrinsicID::_putCharRelease:
 114     case vmIntrinsicID::_putCharUnaligned:
 115     case vmIntrinsicID::_putCharVolatile:
 116     case vmIntrinsicID::_putShort:
 117     case vmIntrinsicID::_putShortOpaque:
 118     case vmIntrinsicID::_putShortRelease:
 119     case vmIntrinsicID::_putShortUnaligned:
 120     case vmIntrinsicID::_putShortVolatile:
 121     case vmIntrinsicID::_putInt:
 122     case vmIntrinsicID::_putIntOpaque:
 123     case vmIntrinsicID::_putIntRelease:
 124     case vmIntrinsicID::_putIntUnaligned:
 125     case vmIntrinsicID::_putIntVolatile:
 126     case vmIntrinsicID::_putLong:
 127     case vmIntrinsicID::_putLongOpaque:
 128     case vmIntrinsicID::_putLongRelease:
 129     case vmIntrinsicID::_putLongUnaligned:
 130     case vmIntrinsicID::_putLongVolatile:
 131     case vmIntrinsicID::_putFloat:
 132     case vmIntrinsicID::_putFloatOpaque:
 133     case vmIntrinsicID::_putFloatRelease:
 134     case vmIntrinsicID::_putFloatVolatile:
 135     case vmIntrinsicID::_putDouble:
 136     case vmIntrinsicID::_putDoubleOpaque:
 137     case vmIntrinsicID::_putDoubleRelease:
 138     case vmIntrinsicID::_putDoubleVolatile:
 139     case vmIntrinsicID::_putReference:
 140     case vmIntrinsicID::_putReferenceOpaque:
 141     case vmIntrinsicID::_putReferenceRelease:
 142     case vmIntrinsicID::_putReferenceVolatile:
 143     case vmIntrinsicID::_putValue:
 144       return true;
 145     default:
 146       return false;
 147   }
 148 }
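
Reviewer note: the predicate above admits exactly two shapes of larval argument: the receiver of a constructor, and the destination argument (index 1) of finishPrivateBuffer or one of the Unsafe put* intrinsics. It feeds the scalarization loop in do_call() further down. A minimal standalone restatement for sanity-checking, with plain strings standing in for vmIntrinsicID (hypothetical names, a sketch rather than HotSpot API):

  #include <cassert>
  #include <string>

  // Simplified stand-in for arg_can_be_larval(): true only for the
  // constructor receiver (arg 0) or for the buffer argument (arg 1) of a
  // finishPrivateBuffer / Unsafe put* intrinsic.
  static bool can_be_larval(bool is_constructor, const std::string& intrinsic, int arg_idx) {
    if (is_constructor && arg_idx == 0) return true;  // `this` inside <init> may be larval
    if (arg_idx != 1) return false;                   // only the destination argument otherwise
    return intrinsic == "finishPrivateBuffer" ||
           intrinsic.compare(0, 3, "put") == 0;       // any put* flavor, including putValue
  }

  int main() {
    assert( can_be_larval(true,  "",               0));  // constructor receiver
    assert( can_be_larval(false, "putIntVolatile", 1));  // Unsafe store into a private buffer
    assert(!can_be_larval(false, "getInt",         1));  // loads never take larval arguments
  }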
 149 
 150 CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool call_does_dispatch,
 151                                        JVMState* jvms, bool allow_inline,
 152                                        float prof_factor, ciKlass* speculative_receiver_type,
 153                                        bool allow_intrinsics) {
 154   assert(callee != nullptr, "failed method resolution");
 155 
 156   ciMethod*       caller      = jvms->method();
 157   int             bci         = jvms->bci();
 158   Bytecodes::Code bytecode    = caller->java_code_at_bci(bci);
 159   ciMethod*       orig_callee = caller->get_method_at_bci(bci);
 160 
 161   const bool is_virtual_or_interface = (bytecode == Bytecodes::_invokevirtual) ||
 162                                        (bytecode == Bytecodes::_invokeinterface) ||
 163                                        (orig_callee->intrinsic_id() == vmIntrinsics::_linkToVirtual) ||
 164                                        (orig_callee->intrinsic_id() == vmIntrinsics::_linkToInterface);
 165 
 166   const bool check_access = !orig_callee->is_method_handle_intrinsic(); // method handle intrinsics don't perform access checks
 167 
 168   // Dtrace currently doesn't work unless all calls are vanilla
 169   if (env()->dtrace_method_probes()) {

 191     log->begin_elem("call method='%d' count='%d' prof_factor='%f'",
 192                     log->identify(callee), site_count, prof_factor);
 193     if (call_does_dispatch)  log->print(" virtual='1'");
 194     if (allow_inline)     log->print(" inline='1'");
 195     if (receiver_count >= 0) {
 196       log->print(" receiver='%d' receiver_count='%d'", rid, receiver_count);
 197       if (profile.has_receiver(1)) {
 198         log->print(" receiver2='%d' receiver2_count='%d'", r2id, profile.receiver_count(1));
 199       }
 200     }
 201     if (callee->is_method_handle_intrinsic()) {
 202       log->print(" method_handle_intrinsic='1'");
 203     }
 204     log->end_elem();
 205   }
 206 
 207   // Special case the handling of certain common, profitable library
 208   // methods.  If these methods are replaced with specialized code,
 209   // then we return it as the inlined version of the call.
 210   CallGenerator* cg_intrinsic = nullptr;
 211   if (callee->intrinsic_id() == vmIntrinsics::_makePrivateBuffer || callee->intrinsic_id() == vmIntrinsics::_finishPrivateBuffer) {
 212     // These methods must be inlined so that we don't have larval value objects crossing method
 213     // boundaries
 214     assert(!call_does_dispatch, "callee should not be virtual %s", callee->name()->as_utf8());
 215     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 216 
 217     if (cg == nullptr) {
 218       // This is probably because the intrinsic is disabled from the command line
 219       char reason[256];
 220       jio_snprintf(reason, sizeof(reason), "cannot find an intrinsic for %s", callee->name()->as_utf8());
 221       C->record_method_not_compilable(reason);
 222       return nullptr;
 223     }
 224     return cg;
 225   } else if (allow_inline && allow_intrinsics) {
 226     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 227     if (cg != nullptr) {
 228       if (cg->is_predicated()) {
 229         // Code without intrinsic but, hopefully, inlined.
 230         CallGenerator* inline_cg = this->call_generator(callee,
 231               vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, speculative_receiver_type, false);
 232         if (inline_cg != nullptr) {
 233           cg = CallGenerator::for_predicated_intrinsic(cg, inline_cg);
 234         }
 235       }
 236 
 237       // If intrinsic does the virtual dispatch, we try to use the type profile
 238       // first, and hopefully inline it as the regular virtual call below.
 239       // We will retry the intrinsic if nothing had claimed it afterwards.
 240       if (cg->does_virtual_dispatch()) {
 241         cg_intrinsic = cg;
 242         cg = nullptr;
 243       } else if (IncrementalInline && should_delay_vector_inlining(callee, jvms)) {
 244         return CallGenerator::for_late_inline(callee, cg);
 245       } else {

 665   // Speculative type of the receiver if any
 666   ciKlass* speculative_receiver_type = nullptr;
 667   if (is_virtual_or_interface) {
 668     Node* receiver_node             = stack(sp() - nargs);
 669     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 670     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 671     // For arrays, klass below is Object. When vtable calls are used,
 672     // resolving the call with Object would allow an illegal call to
 673     // finalize() on an array. We use holder instead: illegal calls to
 674     // finalize() won't be compiled as vtable calls (IC call
 675     // resolution will catch the illegal call) and the few legal calls
 676     // on array types won't be either.
 677     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 678                                       receiver_type, is_virtual,
 679                                       call_does_dispatch, vtable_index);  // out-parameters
 680     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 681   }
 682 
 683   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 684   ciKlass* receiver_constraint = nullptr;
 685   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_constructor()) {
 686     ciInstanceKlass* calling_klass = method()->holder();
 687     ciInstanceKlass* sender_klass = calling_klass;
 688     if (sender_klass->is_interface()) {
 689       receiver_constraint = sender_klass;
 690     }
 691   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 692     assert(holder->is_interface(), "How did we get a non-interface method here!");
 693     receiver_constraint = holder;
 694   }
 695 
 696   if (receiver_constraint != nullptr) {
 697     Node* receiver_node = stack(sp() - nargs);
 698     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 699     Node* bad_type_ctrl = nullptr;
 700     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 701     if (bad_type_ctrl != nullptr) {
 702       PreserveJVMState pjvms(this);
 703       set_control(bad_type_ctrl);
 704       uncommon_trap(Deoptimization::Reason_class_check,
 705                     Deoptimization::Action_none);
 706     }
 707     if (stopped()) {
 708       return; // MUST uncommon-trap?
 709     }
 710     set_stack(sp() - nargs, casted_receiver);
 711   }
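
Reviewer note: the two branches above implement the JVMS receiver subtype checks: an invokespecial issued from an interface method (other than a constructor call) must check the receiver against the sender interface, and a private invokeinterface must check it against the declaring interface. A compact restatement of just the decision, with plain bools standing in for the ci* queries (a sketch, not HotSpot API):

  // Which klass, if any, the receiver must be checked against.
  enum class Constraint { none, sender_interface, declaring_interface };

  Constraint receiver_constraint_for(bool is_invokespecial, bool is_invokeinterface,
                                     bool callee_is_constructor, bool sender_is_interface,
                                     bool callee_is_private) {
    if (is_invokespecial && !callee_is_constructor && sender_is_interface)
      return Constraint::sender_interface;     // e.g. Iface.super.m() from a default method
    if (is_invokeinterface && callee_is_private)
      return Constraint::declaring_interface;  // private interface methods
    return Constraint::none;
  }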
 712 
 713   // Scalarize value objects passed into this invocation if we know that they are not larval
 714   for (int arg_idx = 0; arg_idx < nargs; arg_idx++) {
 715     if (arg_can_be_larval(callee, arg_idx)) {
 716       continue;
 717     }
 718 
 719     cast_to_non_larval(peek(nargs - 1 - arg_idx));
 720   }
 721 
 722   // Note:  It's OK to try to inline a virtual call.
 723   // The call generator will not attempt to inline a polymorphic call
 724   // unless it knows how to optimize the receiver dispatch.
 725   bool try_inline = (C->do_inlining() || InlineAccessors);
 726 
 727   // ---------------------
 728   dec_sp(nargs);              // Temporarily pop args for JVM state of call
 729   JVMState* jvms = sync_jvms();
 730 
 731   // ---------------------
 732   // Decide call tactic.
 733   // This call checks with CHA, the interpreter profile, intrinsics table, etc.
 734   // It decides whether inlining is desirable or not.
 735   CallGenerator* cg = C->call_generator(callee, vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type);
 736   if (failing()) {
 737     return;
 738   }
 739   assert(cg != nullptr, "must find a CallGenerator for callee %s", callee->name()->as_utf8());
 740 
 741   // NOTE:  Don't use orig_callee and callee after this point!  Use cg->method() instead.
 742   orig_callee = callee = nullptr;
 743 
 744   // ---------------------
 745 
 746   // Feed profiling data for arguments to the type system so it can
 747   // propagate it as speculative types
 748   record_profiled_arguments_for_speculation(cg->method(), bc());
 749 
 750 #ifndef PRODUCT
 751   // bump global counters for calls
 752   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 753 
 754   // Record first part of parsing work for this call
 755   parse_histogram()->record_change();
 756 #endif // not PRODUCT
 757 
 758   assert(jvms == this->jvms(), "still operating on the right JVMS");
 759   assert(jvms_in_sync(),       "jvms must carry full info into CG");

 806 
 807   assert(check_call_consistency(jvms, cg), "inconsistent info");
 808 
 809   if (!stopped()) {
 810     // This was some sort of virtual call, which did a null check for us.
 811     // Now we can assert receiver-not-null, on the normal return path.
 812     if (receiver != nullptr && cg->is_virtual()) {
 813       Node* cast = cast_not_null(receiver);
 814       // %%% assert(receiver == cast, "should already have cast the receiver");
 815     }
 816 
 817     ciType* rtype = cg->method()->return_type();
 818     ciType* ctype = declared_signature->return_type();
 819 
 820     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 821       // Be careful here with return types.
 822       if (ctype != rtype) {
 823         BasicType rt = rtype->basic_type();
 824         BasicType ct = ctype->basic_type();
 825         if (ct == T_VOID) {
 826           // It's OK for a method to return a value that is discarded.
 827           // The discarding does not require any special action from the caller.
 828           // The Java code knows this, at VerifyType.isNullConversion.
 829           pop_node(rt);  // whatever it was, pop it
 830         } else if (rt == T_INT || is_subword_type(rt)) {
 831           // Nothing.  These cases are handled in lambda form bytecode.
 832           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 833         } else if (is_reference_type(rt)) {
 834           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 835           if (ctype->is_loaded()) {
 836             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 837             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 838             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 839               Node* retnode = pop();
 840               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 841               push(cast_obj);
 842             }
 843           }
 844         } else {
 845           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 846           // push a zero; it's better than getting an oop/int mismatch

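Reviewer note: the rt/ct adaptation in the hunk above only applies to calls with an optional appendix or signature-polymorphic calls, where linker-generated lambda forms may legitimately disagree with the type declared at the call site. A sketch of the decision table (hypothetical enums, not the HotSpot BasicType machinery):

  enum class BT { t_void, t_int, t_subword, t_ref, t_other };
  enum class Action { pop_value, keep_as_is, checkcast_to_declared, must_match_exactly };

  // What the parser does when the callee's actual return type (rt)
  // differs from the call site's declared return type (ct):
  Action adapt_return(BT rt, BT ct) {
    if (ct == BT::t_void)                       return Action::pop_value;   // caller discards the value
    if (rt == BT::t_int || rt == BT::t_subword) return Action::keep_as_is;  // lambda forms normalized it
    if (rt == BT::t_ref)                        return Action::checkcast_to_declared; // CheckCastPP down to the declared type when needed
    return Action::must_match_exactly;          // anything else is asserted equal
  }
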
 864     // If the return type of the method is not loaded, assert that the
 865     // value we got is a null.  Otherwise, we need to recompile.
 866     if (!rtype->is_loaded()) {
 867       if (PrintOpto && (Verbose || WizardMode)) {
 868         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 869         cg->method()->print_name(); tty->cr();
 870       }
 871       if (C->log() != nullptr) {
 872         C->log()->elem("assert_null reason='return' klass='%d'",
 873                        C->log()->identify(rtype));
 874       }
 875       // If there is going to be a trap, put it at the next bytecode:
 876       set_bci(iter().next_bci());
 877       null_assert(peek());
 878       set_bci(iter().cur_bci()); // put it back
 879     }
 880     BasicType ct = ctype->basic_type();
 881     if (is_reference_type(ct)) {
 882       record_profiled_return_for_speculation();
 883     }
 884 
 885     if (!rtype->is_void() && cg->method()->intrinsic_id() != vmIntrinsicID::_makePrivateBuffer) {
 886       Node* retnode = peek();
 887       const Type* rettype = gvn().type(retnode);
 888       if (rettype->is_inlinetypeptr() && !retnode->is_InlineType()) {
 889         retnode = InlineTypeNode::make_from_oop(this, retnode, rettype->inline_klass());
 890         dec_sp(1);
 891         push(retnode);
 892       }
 893     }
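
Reviewer note: the block above re-wraps a value-class return value that arrives as a plain oop into an InlineTypeNode so that downstream code can scalarize it; makePrivateBuffer is deliberately excluded because its result is still larval and must remain opaque (compare arg_can_be_larval() above).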
 894 
 895     if (cg->method()->is_object_constructor() && receiver != nullptr && gvn().type(receiver)->is_inlinetypeptr()) {
 896       InlineTypeNode* non_larval = InlineTypeNode::make_from_oop(this, receiver, gvn().type(receiver)->inline_klass());
 897       // Relinquish the oop input, we will delay the allocation to the point it is needed, see the
 898       // comments in InlineTypeNode::Ideal for more details
 899       non_larval = non_larval->clone_if_required(&gvn(), nullptr);
 900       non_larval->set_oop(gvn(), null());
 901       non_larval->set_is_buffered(gvn(), false);
 902       non_larval = gvn().transform(non_larval)->as_InlineType();
 903       map()->replace_edge(receiver, non_larval);
 904     }
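
Reviewer note: after a constructor call completes on a value-class receiver, the receiver in the map is replaced by a non-buffered InlineTypeNode with a null oop input, so the heap allocation is deferred until a buffered form is actually required (per the comment, see InlineTypeNode::Ideal).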
 905   }
 906 
 907   // Restart record of parsing work after possible inlining of call
 908 #ifndef PRODUCT
 909   parse_histogram()->set_initial_state(bc());
 910 #endif
 911 }
 912 
 913 //---------------------------catch_call_exceptions-----------------------------
 914 // Put a Catch and CatchProj nodes behind a just-created call.
 915 // Send their caught exceptions to the proper handler.
 916 // This may be used after a call to the rethrow VM stub,
 917 // when it is needed to process unloaded exception classes.
 918 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 919   // Exceptions are delivered through this channel:
 920   Node* i_o = this->i_o();
 921 
 922   // Add a CatchNode.
 923   Arena tmp_mem{mtCompiler};
 924   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);