
src/hotspot/share/opto/doCall.cpp


   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "ci/ciCallSite.hpp"
  26 #include "ci/ciMethodHandle.hpp"
  27 #include "ci/ciSymbols.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "compiler/compileBroker.hpp"
  30 #include "compiler/compileLog.hpp"
  31 #include "interpreter/linkResolver.hpp"
  32 #include "logging/log.hpp"
  33 #include "logging/logLevel.hpp"
  34 #include "logging/logMessage.hpp"
  35 #include "logging/logStream.hpp"
  36 #include "opto/addnode.hpp"
  37 #include "opto/callGenerator.hpp"
  38 #include "opto/castnode.hpp"
  39 #include "opto/cfgnode.hpp"
  40 #include "opto/mulnode.hpp"
  41 #include "opto/parse.hpp"
  42 #include "opto/rootnode.hpp"
  43 #include "opto/runtime.hpp"
  44 #include "opto/subnode.hpp"
  45 #include "prims/methodHandles.hpp"
  46 #include "runtime/sharedRuntime.hpp"
  47 #include "utilities/macros.hpp"
  48 #if INCLUDE_JFR
  49 #include "jfr/jfr.hpp"
  50 #endif
  51 
  52 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
  53                                      bool with_deco) {
  54   if (with_deco) {
  55     CompileTask::print_inline_indent(depth, out);
  56   }
  57   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  58   prof_klass->name()->print_symbol_on(out);
  59   if (with_deco) {

  69     if (!C->print_inlining()) {
  70       if (!PrintOpto && !PrintCompilation) {
  71         method->print_short_name();
  72         tty->cr();
  73       }
  74       CompileTask::print_inlining_tty(prof_method, depth, bci, InliningResult::SUCCESS);
  75       print_trace_type_profile(tty, depth, prof_klass, site_count, receiver_count, true);
  76     } else {
  77       auto stream = C->inline_printer()->record(method, jvms, InliningResult::SUCCESS);
  78       print_trace_type_profile(stream, depth, prof_klass, site_count, receiver_count, false);
  79     }
  80   }
  81 
  82   LogTarget(Debug, jit, inlining) lt;
  83   if (lt.is_enabled()) {
  84     LogStream ls(lt);
  85     print_trace_type_profile(&ls, depth, prof_klass, site_count, receiver_count, true);
  86   }
  87 }
  88 
  89 CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool call_does_dispatch,
  90                                        JVMState* jvms, bool allow_inline,
  91                                        float prof_factor, ciKlass* speculative_receiver_type,
  92                                        bool allow_intrinsics) {
  93   assert(callee != nullptr, "failed method resolution");
  94 
  95   ciMethod*       caller      = jvms->method();
  96   int             bci         = jvms->bci();
  97   Bytecodes::Code bytecode    = caller->java_code_at_bci(bci);
  98   ciMethod*       orig_callee = caller->get_method_at_bci(bci);
  99 
 100   const bool is_virtual_or_interface = (bytecode == Bytecodes::_invokevirtual) ||
 101                                        (bytecode == Bytecodes::_invokeinterface) ||
 102                                        (orig_callee->intrinsic_id() == vmIntrinsics::_linkToVirtual) ||
 103                                        (orig_callee->intrinsic_id() == vmIntrinsics::_linkToInterface);
 104 
 105   // Dtrace currently doesn't work unless all calls are vanilla
 106   if (env()->dtrace_method_probes()) {
 107     allow_inline = false;
 108   }

 128     log->begin_elem("call method='%d' count='%d' prof_factor='%f'",
 129                     log->identify(callee), site_count, prof_factor);
 130     if (call_does_dispatch)  log->print(" virtual='1'");
 131     if (allow_inline)     log->print(" inline='1'");
 132     if (receiver_count >= 0) {
 133       log->print(" receiver='%d' receiver_count='%d'", rid, receiver_count);
 134       if (profile.has_receiver(1)) {
 135         log->print(" receiver2='%d' receiver2_count='%d'", r2id, profile.receiver_count(1));
 136       }
 137     }
 138     if (callee->is_method_handle_intrinsic()) {
 139       log->print(" method_handle_intrinsic='1'");
 140     }
 141     log->end_elem();
 142   }
 143 
 144   // Special case the handling of certain common, profitable library
 145   // methods.  If these methods are replaced with specialized code,
 146   // then we return it as the inlined version of the call.
 147   CallGenerator* cg_intrinsic = nullptr;
 148   if (allow_inline && allow_intrinsics) {
 149     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 150     if (cg != nullptr) {
 151       if (cg->is_predicated()) {
 152         // Code without intrinsic but, hopefully, inlined.
 153         CallGenerator* inline_cg = this->call_generator(callee,
 154               vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, speculative_receiver_type, false);
 155         if (inline_cg != nullptr) {
 156           cg = CallGenerator::for_predicated_intrinsic(cg, inline_cg);
 157         }
 158       }
 159 
 160       // If the intrinsic does the virtual dispatch, we try to use the type profile
 161       // first, and hopefully inline it as a regular virtual call below.
 162       // We will retry the intrinsic if nothing else has claimed it by then.
 163       if (cg->does_virtual_dispatch()) {
 164         cg_intrinsic = cg;
 165         cg = nullptr;
 166       } else if (IncrementalInline && should_delay_vector_inlining(callee, jvms)) {
 167         return CallGenerator::for_late_inline(callee, cg);
 168       } else {

 584   // Speculative type of the receiver if any
 585   ciKlass* speculative_receiver_type = nullptr;
 586   if (is_virtual_or_interface) {
 587     Node* receiver_node             = stack(sp() - nargs);
 588     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 589     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 590     // For arrays, klass below is Object. When vtable calls are used,
 591     // resolving the call with Object would allow an illegal call to
 592     // finalize() on an array. We use holder instead: illegal calls to
 593     // finalize() won't be compiled as vtable calls (IC call
 594     // resolution will catch the illegal call) and the few legal calls
 595     // on array types won't be either.
 596     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 597                                       receiver_type, is_virtual,
 598                                       call_does_dispatch, vtable_index);  // out-parameters
 599     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 600   }
 601 
 602   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 603   ciKlass* receiver_constraint = nullptr;
 604   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_initializer()) {
 605     ciInstanceKlass* calling_klass = method()->holder();
 606     ciInstanceKlass* sender_klass = calling_klass;
 607     if (sender_klass->is_interface()) {
 608       receiver_constraint = sender_klass;
 609     }
 610   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 611     assert(holder->is_interface(), "How did we get a non-interface method here!");
 612     receiver_constraint = holder;
 613   }
 614 
 615   if (receiver_constraint != nullptr) {
 616     Node* receiver_node = stack(sp() - nargs);
 617     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 618     Node* bad_type_ctrl = nullptr;
 619     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 620     if (bad_type_ctrl != nullptr) {
 621       PreserveJVMState pjvms(this);
 622       set_control(bad_type_ctrl);
 623       uncommon_trap(Deoptimization::Reason_class_check,
 624                     Deoptimization::Action_none);
 625     }
 626     if (stopped()) {
 627       return; // MUST uncommon-trap?
 628     }
 629     set_stack(sp() - nargs, casted_receiver);
 630   }
 631 
 632   // Note:  It's OK to try to inline a virtual call.
 633   // The call generator will not attempt to inline a polymorphic call
 634   // unless it knows how to optimize the receiver dispatch.
 635   bool try_inline = (C->do_inlining() || InlineAccessors);
 636 
 637   // ---------------------
 638   dec_sp(nargs);              // Temporarily pop args for JVM state of call
 639   JVMState* jvms = sync_jvms();
 640 
 641   // ---------------------
 642   // Decide call tactic.
 643   // This call checks with CHA, the interpreter profile, intrinsics table, etc.
 644   // It decides whether inlining is desirable or not.
 645   CallGenerator* cg = C->call_generator(callee, vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type);
 646 
 647   // NOTE:  Don't use orig_callee and callee after this point!  Use cg->method() instead.
 648   orig_callee = callee = nullptr;
 649 
 650   // ---------------------
 651 
 652   // Feed profiling data for arguments to the type system so it can
 653   // propagate it as speculative types
 654   record_profiled_arguments_for_speculation(cg->method(), bc());
 655 
 656 #ifndef PRODUCT
 657   // bump global counters for calls
 658   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 659 
 660   // Record first part of parsing work for this call
 661   parse_histogram()->record_change();
 662 #endif // not PRODUCT
 663 
 664   assert(jvms == this->jvms(), "still operating on the right JVMS");
 665   assert(jvms_in_sync(),       "jvms must carry full info into CG");

 712 
 713   assert(check_call_consistency(jvms, cg), "inconsistent info");
 714 
 715   if (!stopped()) {
 716     // This was some sort of virtual call, which did a null check for us.
 717     // Now we can assert receiver-not-null, on the normal return path.
 718     if (receiver != nullptr && cg->is_virtual()) {
 719       Node* cast = cast_not_null(receiver);
 720       // %%% assert(receiver == cast, "should already have cast the receiver");
 721     }
 722 
 723     ciType* rtype = cg->method()->return_type();
 724     ciType* ctype = declared_signature->return_type();
 725 
 726     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 727       // Be careful here with return types.
 728       if (ctype != rtype) {
 729         BasicType rt = rtype->basic_type();
 730         BasicType ct = ctype->basic_type();
 731         if (ct == T_VOID) {
 732           // It's OK for a method  to return a value that is discarded.
 733           // The discarding does not require any special action from the caller.
 734           // The Java code knows this, at VerifyType.isNullConversion.
 735           pop_node(rt);  // whatever it was, pop it
 736         } else if (rt == T_INT || is_subword_type(rt)) {
 737           // Nothing.  These cases are handled in lambda form bytecode.
 738           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 739         } else if (is_reference_type(rt)) {
 740           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 741           if (ctype->is_loaded()) {
 742             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 743             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 744             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 745               Node* retnode = pop();
 746               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 747               push(cast_obj);
 748             }
 749           }
 750         } else {
 751           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 752           // push a zero; it's better than getting an oop/int mismatch

 770     // If the return type of the method is not loaded, assert that the
 771     // value we got is a null.  Otherwise, we need to recompile.
 772     if (!rtype->is_loaded()) {
 773       if (PrintOpto && (Verbose || WizardMode)) {
 774         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 775         cg->method()->print_name(); tty->cr();
 776       }
 777       if (C->log() != nullptr) {
 778         C->log()->elem("assert_null reason='return' klass='%d'",
 779                        C->log()->identify(rtype));
 780       }
 781       // If there is going to be a trap, put it at the next bytecode:
 782       set_bci(iter().next_bci());
 783       null_assert(peek());
 784       set_bci(iter().cur_bci()); // put it back
 785     }
 786     BasicType ct = ctype->basic_type();
 787     if (is_reference_type(ct)) {
 788       record_profiled_return_for_speculation();
 789     }
 790   }
 791 
 792   // Restart record of parsing work after possible inlining of call
 793 #ifndef PRODUCT
 794   parse_histogram()->set_initial_state(bc());
 795 #endif
 796 }
 797 
 798 //---------------------------catch_call_exceptions-----------------------------
 799 // Put a Catch and CatchProj nodes behind a just-created call.
 800 // Send their caught exceptions to the proper handler.
 801 // This may be used after a call to the rethrow VM stub,
 802 // when it is needed to process unloaded exception classes.
 803 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 804   // Exceptions are delivered through this channel:
 805   Node* i_o = this->i_o();
 806 
 807   // Add a CatchNode.
 808   Arena tmp_mem{mtCompiler};
 809   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);

   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "ci/ciCallSite.hpp"
  26 #include "ci/ciMethodHandle.hpp"
  27 #include "ci/ciSymbols.hpp"
  28 #include "classfile/vmIntrinsics.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "interpreter/linkResolver.hpp"
  33 #include "jvm_io.h"
  34 #include "logging/log.hpp"
  35 #include "logging/logLevel.hpp"
  36 #include "logging/logMessage.hpp"
  37 #include "logging/logStream.hpp"
  38 #include "opto/addnode.hpp"
  39 #include "opto/callGenerator.hpp"
  40 #include "opto/castnode.hpp"
  41 #include "opto/cfgnode.hpp"
  42 #include "opto/inlinetypenode.hpp"
  43 #include "opto/mulnode.hpp"
  44 #include "opto/parse.hpp"
  45 #include "opto/rootnode.hpp"
  46 #include "opto/runtime.hpp"
  47 #include "opto/subnode.hpp"
  48 #include "prims/methodHandles.hpp"
  49 #include "runtime/sharedRuntime.hpp"
  50 #include "utilities/macros.hpp"
  51 #if INCLUDE_JFR
  52 #include "jfr/jfr.hpp"
  53 #endif
  54 
  55 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
  56                                      bool with_deco) {
  57   if (with_deco) {
  58     CompileTask::print_inline_indent(depth, out);
  59   }
  60   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  61   prof_klass->name()->print_symbol_on(out);
  62   if (with_deco) {

  72     if (!C->print_inlining()) {
  73       if (!PrintOpto && !PrintCompilation) {
  74         method->print_short_name();
  75         tty->cr();
  76       }
  77       CompileTask::print_inlining_tty(prof_method, depth, bci, InliningResult::SUCCESS);
  78       print_trace_type_profile(tty, depth, prof_klass, site_count, receiver_count, true);
  79     } else {
  80       auto stream = C->inline_printer()->record(method, jvms, InliningResult::SUCCESS);
  81       print_trace_type_profile(stream, depth, prof_klass, site_count, receiver_count, false);
  82     }
  83   }
  84 
  85   LogTarget(Debug, jit, inlining) lt;
  86   if (lt.is_enabled()) {
  87     LogStream ls(lt);
  88     print_trace_type_profile(&ls, depth, prof_klass, site_count, receiver_count, true);
  89   }
  90 }
  91 
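For reference, the LogTarget(Debug, jit, inlining) sink above is driven by unified logging; a standard invocation that enables it (my example, not from the patch):

      java -Xlog:jit+inlining=debug ...

This emits the same "\-> TypeProfile ..." lines, with indentation (with_deco == true), to the UL stream rather than to tty.
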
  92 static bool arg_can_be_larval(ciMethod* callee, int arg_idx) {
  93   if (callee->is_object_constructor() && arg_idx == 0) {
  94     return true;
  95   }
  96 
  97   if (arg_idx != 1 || callee->intrinsic_id() == vmIntrinsicID::_none) {
  98     return false;
  99   }
 100 
 101   switch (callee->intrinsic_id()) {
 102     case vmIntrinsicID::_finishPrivateBuffer:
 103     case vmIntrinsicID::_putBoolean:
 104     case vmIntrinsicID::_putBooleanOpaque:
 105     case vmIntrinsicID::_putBooleanRelease:
 106     case vmIntrinsicID::_putBooleanVolatile:
 107     case vmIntrinsicID::_putByte:
 108     case vmIntrinsicID::_putByteOpaque:
 109     case vmIntrinsicID::_putByteRelease:
 110     case vmIntrinsicID::_putByteVolatile:
 111     case vmIntrinsicID::_putChar:
 112     case vmIntrinsicID::_putCharOpaque:
 113     case vmIntrinsicID::_putCharRelease:
 114     case vmIntrinsicID::_putCharUnaligned:
 115     case vmIntrinsicID::_putCharVolatile:
 116     case vmIntrinsicID::_putShort:
 117     case vmIntrinsicID::_putShortOpaque:
 118     case vmIntrinsicID::_putShortRelease:
 119     case vmIntrinsicID::_putShortUnaligned:
 120     case vmIntrinsicID::_putShortVolatile:
 121     case vmIntrinsicID::_putInt:
 122     case vmIntrinsicID::_putIntOpaque:
 123     case vmIntrinsicID::_putIntRelease:
 124     case vmIntrinsicID::_putIntUnaligned:
 125     case vmIntrinsicID::_putIntVolatile:
 126     case vmIntrinsicID::_putLong:
 127     case vmIntrinsicID::_putLongOpaque:
 128     case vmIntrinsicID::_putLongRelease:
 129     case vmIntrinsicID::_putLongUnaligned:
 130     case vmIntrinsicID::_putLongVolatile:
 131     case vmIntrinsicID::_putFloat:
 132     case vmIntrinsicID::_putFloatOpaque:
 133     case vmIntrinsicID::_putFloatRelease:
 134     case vmIntrinsicID::_putFloatVolatile:
 135     case vmIntrinsicID::_putDouble:
 136     case vmIntrinsicID::_putDoubleOpaque:
 137     case vmIntrinsicID::_putDoubleRelease:
 138     case vmIntrinsicID::_putDoubleVolatile:
 139     case vmIntrinsicID::_putReference:
 140     case vmIntrinsicID::_putReferenceOpaque:
 141     case vmIntrinsicID::_putReferenceRelease:
 142     case vmIntrinsicID::_putReferenceVolatile:
 143     case vmIntrinsicID::_putValue:
 144       return true;
 145     default:
 146       return false;
 147   }
 148 }
 149 
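For context on the whitelist above: it mirrors the larval value-object protocol of jdk.internal.misc.Unsafe. A minimal sketch of that protocol, with illustrative names (MyValue and OFF are not from this patch):

      // MyValue v = ...;
      // MyValue b = UNSAFE.makePrivateBuffer(v);   // b is larval (mutable)
      // UNSAFE.putInt(b, OFF, 42);                 // argument 1 of putInt is the larval b
      // v = UNSAFE.finishPrivateBuffer(b);         // b must not escape in larval state

Argument 0 of a constructor is the receiver still under initialization, which is larval by definition; every other argument position must hold a finished value.
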
 150 CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool call_does_dispatch,
 151                                        JVMState* jvms, bool allow_inline,
 152                                        float prof_factor, ciKlass* speculative_receiver_type,
 153                                        bool allow_intrinsics) {
 154   assert(callee != nullptr, "failed method resolution");
 155 
 156   ciMethod*       caller      = jvms->method();
 157   int             bci         = jvms->bci();
 158   Bytecodes::Code bytecode    = caller->java_code_at_bci(bci);
 159   ciMethod*       orig_callee = caller->get_method_at_bci(bci);
 160 
 161   const bool is_virtual_or_interface = (bytecode == Bytecodes::_invokevirtual) ||
 162                                        (bytecode == Bytecodes::_invokeinterface) ||
 163                                        (orig_callee->intrinsic_id() == vmIntrinsics::_linkToVirtual) ||
 164                                        (orig_callee->intrinsic_id() == vmIntrinsics::_linkToInterface);
 165 
 166   // Dtrace currently doesn't work unless all calls are vanilla
 167   if (env()->dtrace_method_probes()) {
 168     allow_inline = false;
 169   }

 189     log->begin_elem("call method='%d' count='%d' prof_factor='%f'",
 190                     log->identify(callee), site_count, prof_factor);
 191     if (call_does_dispatch)  log->print(" virtual='1'");
 192     if (allow_inline)     log->print(" inline='1'");
 193     if (receiver_count >= 0) {
 194       log->print(" receiver='%d' receiver_count='%d'", rid, receiver_count);
 195       if (profile.has_receiver(1)) {
 196         log->print(" receiver2='%d' receiver2_count='%d'", r2id, profile.receiver_count(1));
 197       }
 198     }
 199     if (callee->is_method_handle_intrinsic()) {
 200       log->print(" method_handle_intrinsic='1'");
 201     }
 202     log->end_elem();
 203   }
 204 
 205   // Special case the handling of certain common, profitable library
 206   // methods.  If these methods are replaced with specialized code,
 207   // then we return it as the inlined version of the call.
 208   CallGenerator* cg_intrinsic = nullptr;
 209   if (callee->intrinsic_id() == vmIntrinsics::_makePrivateBuffer || callee->intrinsic_id() == vmIntrinsics::_finishPrivateBuffer) {
 210     // These methods must be inlined so that we don't have larval value objects crossing method
 211     // boundaries
 212     assert(!call_does_dispatch, "callee should not be virtual %s", callee->name()->as_utf8());
 213     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 214 
 215     if (cg == nullptr) {
 216       // This is probably because the intrinsic is disabled on the command line
 217       char reason[256];
 218       jio_snprintf(reason, sizeof(reason), "cannot find an intrinsic for %s", callee->name()->as_utf8());
 219       C->record_method_not_compilable(reason);
 220       return nullptr;
 221     }
 222     return cg;
 223   } else if (allow_inline && allow_intrinsics) {
 224     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 225     if (cg != nullptr) {
 226       if (cg->is_predicated()) {
 227         // Code without intrinsic but, hopefully, inlined.
 228         CallGenerator* inline_cg = this->call_generator(callee,
 229               vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, speculative_receiver_type, false);
 230         if (inline_cg != nullptr) {
 231           cg = CallGenerator::for_predicated_intrinsic(cg, inline_cg);
 232         }
 233       }
 234 
 235       // If the intrinsic does the virtual dispatch, we try to use the type profile
 236       // first, and hopefully inline it as a regular virtual call below.
 237       // We will retry the intrinsic if nothing else has claimed it by then.
 238       if (cg->does_virtual_dispatch()) {
 239         cg_intrinsic = cg;
 240         cg = nullptr;
 241       } else if (IncrementalInline && should_delay_vector_inlining(callee, jvms)) {
 242         return CallGenerator::for_late_inline(callee, cg);
 243       } else {

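The bail-out at new lines 215-221 above is reachable, for example, when the intrinsic has been switched off; DisableIntrinsic is a real diagnostic flag, and the invocation below is only an illustration:

      java -XX:+UnlockDiagnosticVMOptions -XX:DisableIntrinsic=_finishPrivateBuffer ...

With no CallGenerator available, C2 records the method as not compilable, and the call keeps executing in the interpreter, which can handle larval objects across calls.
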
 659   // Speculative type of the receiver if any
 660   ciKlass* speculative_receiver_type = nullptr;
 661   if (is_virtual_or_interface) {
 662     Node* receiver_node             = stack(sp() - nargs);
 663     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 664     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 665     // For arrays, klass below is Object. When vtable calls are used,
 666     // resolving the call with Object would allow an illegal call to
 667     // finalize() on an array. We use holder instead: illegal calls to
 668     // finalize() won't be compiled as vtable calls (IC call
 669     // resolution will catch the illegal call) and the few legal calls
 670     // on array types won't be either.
 671     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 672                                       receiver_type, is_virtual,
 673                                       call_does_dispatch, vtable_index);  // out-parameters
 674     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 675   }
 676 
 677   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 678   ciKlass* receiver_constraint = nullptr;
 679   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_constructor()) {
 680     ciInstanceKlass* calling_klass = method()->holder();
 681     ciInstanceKlass* sender_klass = calling_klass;
 682     if (sender_klass->is_interface()) {
 683       receiver_constraint = sender_klass;
 684     }
 685   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 686     assert(holder->is_interface(), "How did we get a non-interface method here!");
 687     receiver_constraint = holder;
 688   }
 689 
 690   if (receiver_constraint != nullptr) {
 691     Node* receiver_node = stack(sp() - nargs);
 692     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 693     Node* bad_type_ctrl = nullptr;
 694     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 695     if (bad_type_ctrl != nullptr) {
 696       PreserveJVMState pjvms(this);
 697       set_control(bad_type_ctrl);
 698       uncommon_trap(Deoptimization::Reason_class_check,
 699                     Deoptimization::Action_none);
 700     }
 701     if (stopped()) {
 702       return; // MUST uncommon-trap?
 703     }
 704     set_stack(sp() - nargs, casted_receiver);
 705   }
 706 
 707   // Scalarize value objects passed into this invocation if we know that they are not larval
 708   for (int arg_idx = 0; arg_idx < nargs; arg_idx++) {
 709     if (arg_can_be_larval(callee, arg_idx)) {
 710       continue;
 711     }
 712 
 713     cast_to_non_larval(peek(nargs - 1 - arg_idx));
 714   }
 715 
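The index arithmetic in the loop above follows from the parser's expression-stack layout; assuming GraphKit's peek(d) == stack(sp() - 1 - d), the mapping is:

      // arg_idx == nargs-1 (last argument)  -> peek(0)           (top of stack)
      // ...
      // arg_idx == 0       (first argument) -> peek(nargs - 1)   (deepest slot)

so peek(nargs - 1 - arg_idx) visits each outgoing argument exactly once, and positions exempted by arg_can_be_larval() are simply skipped.
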
 716   // Note:  It's OK to try to inline a virtual call.
 717   // The call generator will not attempt to inline a polymorphic call
 718   // unless it knows how to optimize the receiver dispatch.
 719   bool try_inline = (C->do_inlining() || InlineAccessors);
 720 
 721   // ---------------------
 722   dec_sp(nargs);              // Temporarily pop args for JVM state of call
 723   JVMState* jvms = sync_jvms();
 724 
 725   // ---------------------
 726   // Decide call tactic.
 727   // This call checks with CHA, the interpreter profile, intrinsics table, etc.
 728   // It decides whether inlining is desirable or not.
 729   CallGenerator* cg = C->call_generator(callee, vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type);
 730   if (failing()) {
 731     return;
 732   }
 733   assert(cg != nullptr, "must find a CallGenerator for callee %s", callee->name()->as_utf8());
 734 
 735   // NOTE:  Don't use orig_callee and callee after this point!  Use cg->method() instead.
 736   orig_callee = callee = nullptr;
 737 
 738   // ---------------------
 739 
 740   // Feed profiling data for arguments to the type system so it can
 741   // propagate it as speculative types
 742   record_profiled_arguments_for_speculation(cg->method(), bc());
 743 
 744 #ifndef PRODUCT
 745   // bump global counters for calls
 746   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 747 
 748   // Record first part of parsing work for this call
 749   parse_histogram()->record_change();
 750 #endif // not PRODUCT
 751 
 752   assert(jvms == this->jvms(), "still operating on the right JVMS");
 753   assert(jvms_in_sync(),       "jvms must carry full info into CG");

 800 
 801   assert(check_call_consistency(jvms, cg), "inconsistent info");
 802 
 803   if (!stopped()) {
 804     // This was some sort of virtual call, which did a null check for us.
 805     // Now we can assert receiver-not-null, on the normal return path.
 806     if (receiver != nullptr && cg->is_virtual()) {
 807       Node* cast = cast_not_null(receiver);
 808       // %%% assert(receiver == cast, "should already have cast the receiver");
 809     }
 810 
 811     ciType* rtype = cg->method()->return_type();
 812     ciType* ctype = declared_signature->return_type();
 813 
 814     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 815       // Be careful here with return types.
 816       if (ctype != rtype) {
 817         BasicType rt = rtype->basic_type();
 818         BasicType ct = ctype->basic_type();
 819         if (ct == T_VOID) {
 820           // It's OK for a method to return a value that is discarded.
 821           // The discarding does not require any special action from the caller.
 822           // The Java code knows this, at VerifyType.isNullConversion.
 823           pop_node(rt);  // whatever it was, pop it
 824         } else if (rt == T_INT || is_subword_type(rt)) {
 825           // Nothing.  These cases are handled in lambda form bytecode.
 826           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 827         } else if (is_reference_type(rt)) {
 828           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 829           if (ctype->is_loaded()) {
 830             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 831             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 832             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 833               Node* retnode = pop();
 834               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 835               push(cast_obj);
 836             }
 837           }
 838         } else {
 839           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 840           // push a zero; it's better than getting an oop/int mismatch

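A worked example for the reference-type branch above, with an illustrative signature-polymorphic call site:

      // String s = (String) mh.invokeExact(x);
      //
      // The linked lambda form's return type rt erases to Object, while the
      // declared ctype at the site is String. TypeOopPtr(Object) does not
      // higher_equal TypeOopPtr(String), so the result is pinned with a
      // CheckCastPPNode to the declared type before being pushed back.
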
 858     // If the return type of the method is not loaded, assert that the
 859     // value we got is a null.  Otherwise, we need to recompile.
 860     if (!rtype->is_loaded()) {
 861       if (PrintOpto && (Verbose || WizardMode)) {
 862         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 863         cg->method()->print_name(); tty->cr();
 864       }
 865       if (C->log() != nullptr) {
 866         C->log()->elem("assert_null reason='return' klass='%d'",
 867                        C->log()->identify(rtype));
 868       }
 869       // If there is going to be a trap, put it at the next bytecode:
 870       set_bci(iter().next_bci());
 871       null_assert(peek());
 872       set_bci(iter().cur_bci()); // put it back
 873     }
 874     BasicType ct = ctype->basic_type();
 875     if (is_reference_type(ct)) {
 876       record_profiled_return_for_speculation();
 877     }
 878 
 879     if (!rtype->is_void() && cg->method()->intrinsic_id() != vmIntrinsicID::_makePrivateBuffer) {
 880       Node* retnode = peek();
 881       const Type* rettype = gvn().type(retnode);
 882       if (rettype->is_inlinetypeptr() && !retnode->is_InlineType()) {
 883         retnode = InlineTypeNode::make_from_oop(this, retnode, rettype->inline_klass());
 884         dec_sp(1);
 885         push(retnode);
 886       }
 887     }
 888 
 889     if (cg->method()->is_object_constructor() && receiver != nullptr && gvn().type(receiver)->is_inlinetypeptr()) {
 890       InlineTypeNode* non_larval = InlineTypeNode::make_from_oop(this, receiver, gvn().type(receiver)->inline_klass());
 891       // Relinquish the oop input, we will delay the allocation to the point it is needed, see the
 892       // comments in InlineTypeNode::Ideal for more details
 893       non_larval = non_larval->clone_if_required(&gvn(), nullptr);
 894       non_larval->set_oop(gvn(), null());
 895       non_larval->set_is_buffered(gvn(), false);
 896       non_larval = gvn().transform(non_larval)->as_InlineType();
 897       map()->replace_edge(receiver, non_larval);
 898     }
 899   }
 900 
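A reading of the constructor special case above (my summary, not part of the patch): once the constructor returns, the receiver is fully initialized, so the map is redirected to an equivalent non-larval InlineTypeNode whose state after the transform is roughly:

      // InlineTypeNode {
      //   oop         = null,   // buffer relinquished
      //   is_buffered = false,  // re-allocate lazily, see InlineTypeNode::Ideal
      //   fields      = values loaded from the just-initialized receiver
      // }

Downstream uses consume the scalarized fields directly; an allocation is emitted again only if some use still requires a buffered oop.
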
 901   // Restart record of parsing work after possible inlining of call
 902 #ifndef PRODUCT
 903   parse_histogram()->set_initial_state(bc());
 904 #endif
 905 }
 906 
 907 //---------------------------catch_call_exceptions-----------------------------
 908 // Put a Catch and CatchProj nodes behind a just-created call.
 909 // Send their caught exceptions to the proper handler.
 910 // This may be used after a call to the rethrow VM stub,
 911 // when it is needed to process unloaded exception classes.
 912 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 913   // Exceptions are delivered through this channel:
 914   Node* i_o = this->i_o();
 915 
 916   // Add a CatchNode.
 917   Arena tmp_mem{mtCompiler};
 918   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);