src/hotspot/share/opto/doCall.cpp (old version)

   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "ci/ciCallSite.hpp"
  26 #include "ci/ciMethodHandle.hpp"
  27 #include "ci/ciSymbols.hpp"

  28 #include "classfile/vmSymbols.hpp"
  29 #include "compiler/compileBroker.hpp"
  30 #include "compiler/compileLog.hpp"
  31 #include "interpreter/linkResolver.hpp"

  32 #include "logging/log.hpp"
  33 #include "logging/logLevel.hpp"
  34 #include "logging/logMessage.hpp"
  35 #include "logging/logStream.hpp"
  36 #include "opto/addnode.hpp"
  37 #include "opto/callGenerator.hpp"
  38 #include "opto/castnode.hpp"
  39 #include "opto/cfgnode.hpp"

  40 #include "opto/mulnode.hpp"
  41 #include "opto/parse.hpp"
  42 #include "opto/rootnode.hpp"
  43 #include "opto/runtime.hpp"
  44 #include "opto/subnode.hpp"
  45 #include "prims/methodHandles.hpp"
  46 #include "runtime/sharedRuntime.hpp"
  47 #include "utilities/macros.hpp"
  48 #if INCLUDE_JFR
  49 #include "jfr/jfr.hpp"
  50 #endif
  51 
  52 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
  53                                      bool with_deco) {
  54   if (with_deco) {
  55     CompileTask::print_inline_indent(depth, out);
  56   }
  57   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  58   prof_klass->name()->print_symbol_on(out);
  59   if (with_deco) {

  69     if (!C->print_inlining()) {
  70       if (!PrintOpto && !PrintCompilation) {
  71         method->print_short_name();
  72         tty->cr();
  73       }
  74       CompileTask::print_inlining_tty(prof_method, depth, bci, InliningResult::SUCCESS);
  75       print_trace_type_profile(tty, depth, prof_klass, site_count, receiver_count, true);
  76     } else {
  77       auto stream = C->inline_printer()->record(method, jvms, InliningResult::SUCCESS);
  78       print_trace_type_profile(stream, depth, prof_klass, site_count, receiver_count, false);
  79     }
  80   }
  81 
  82   LogTarget(Debug, jit, inlining) lt;
  83   if (lt.is_enabled()) {
  84     LogStream ls(lt);
  85     print_trace_type_profile(&ls, depth, prof_klass, site_count, receiver_count, true);
  86   }
  87 }
  88 
  89 CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool call_does_dispatch,
  90                                        JVMState* jvms, bool allow_inline,
  91                                        float prof_factor, ciKlass* speculative_receiver_type,
  92                                        bool allow_intrinsics) {
  93   assert(callee != nullptr, "failed method resolution");
  94 
  95   ciMethod*       caller      = jvms->method();
  96   int             bci         = jvms->bci();
  97   Bytecodes::Code bytecode    = caller->java_code_at_bci(bci);
  98   ciMethod*       orig_callee = caller->get_method_at_bci(bci);
  99 
 100   const bool is_virtual = (bytecode == Bytecodes::_invokevirtual) || (orig_callee->intrinsic_id() == vmIntrinsics::_linkToVirtual);
 101   const bool is_interface = (bytecode == Bytecodes::_invokeinterface) || (orig_callee->intrinsic_id() == vmIntrinsics::_linkToInterface);
 102   const bool is_virtual_or_interface = is_virtual || is_interface;
 103 
 104   const bool check_access = !orig_callee->is_method_handle_intrinsic(); // method handle intrinsics don't perform access checks
 105 
 106   // Dtrace currently doesn't work unless all calls are vanilla
 107   if (env()->dtrace_method_probes()) {
 108     allow_inline = false;

 129     log->begin_elem("call method='%d' count='%d' prof_factor='%f'",
 130                     log->identify(callee), site_count, prof_factor);
 131     if (call_does_dispatch)  log->print(" virtual='1'");
 132     if (allow_inline)     log->print(" inline='1'");
 133     if (receiver_count >= 0) {
 134       log->print(" receiver='%d' receiver_count='%d'", rid, receiver_count);
 135       if (profile.has_receiver(1)) {
 136         log->print(" receiver2='%d' receiver2_count='%d'", r2id, profile.receiver_count(1));
 137       }
 138     }
 139     if (callee->is_method_handle_intrinsic()) {
 140       log->print(" method_handle_intrinsic='1'");
 141     }
 142     log->end_elem();
 143   }
 144 
 145   // Special case the handling of certain common, profitable library
 146   // methods.  If these methods are replaced with specialized code,
 147   // then we return it as the inlined version of the call.
 148   CallGenerator* cg_intrinsic = nullptr;
 149   if (allow_inline && allow_intrinsics) {
 150     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 151     if (cg != nullptr) {
 152       if (cg->is_predicated()) {
 153         // Code without intrinsic but, hopefully, inlined.
 154         CallGenerator* inline_cg = this->call_generator(callee,
 155               vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, speculative_receiver_type, false);
 156         if (inline_cg != nullptr) {
 157           cg = CallGenerator::for_predicated_intrinsic(cg, inline_cg);
 158         }
 159       }
 160 
 161       // If the intrinsic does the virtual dispatch, we try to use the type profile
 162       // first, and hopefully inline it as the regular virtual call below.
 163       // We will retry the intrinsic if nothing else has claimed it by then.
 164       if (cg->does_virtual_dispatch()) {
 165         cg_intrinsic = cg;
 166         cg = nullptr;
 167       } else if (IncrementalInline && should_delay_vector_inlining(callee, jvms)) {
 168         return CallGenerator::for_late_inline(callee, cg);
 169       } else {

 597   // Speculative type of the receiver if any
 598   ciKlass* speculative_receiver_type = nullptr;
 599   if (is_virtual_or_interface) {
 600     Node* receiver_node             = stack(sp() - nargs);
 601     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 602     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 603     // For arrays, klass below is Object. When vtable calls are used,
 604     // resolving the call with Object would allow an illegal call to
 605     // finalize() on an array. We use holder instead: illegal calls to
 606     // finalize() won't be compiled as vtable calls (IC call
 607     // resolution will catch the illegal call) and the few legal calls
 608     // on array types won't be either.
 609     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 610                                       receiver_type, is_virtual,
 611                                       call_does_dispatch, vtable_index);  // out-parameters
 612     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 613   }
 614 
 615   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 616   ciKlass* receiver_constraint = nullptr;
 617   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_initializer()) {
 618     ciInstanceKlass* calling_klass = method()->holder();
 619     ciInstanceKlass* sender_klass = calling_klass;
 620     if (sender_klass->is_interface()) {
 621       receiver_constraint = sender_klass;
 622     }
 623   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 624     assert(holder->is_interface(), "How did we get a non-interface method here!");
 625     receiver_constraint = holder;
 626   }
 627 
 628   if (receiver_constraint != nullptr) {
 629     Node* receiver_node = stack(sp() - nargs);
 630     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 631     Node* bad_type_ctrl = nullptr;
 632     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 633     if (bad_type_ctrl != nullptr) {
 634       PreserveJVMState pjvms(this);
 635       set_control(bad_type_ctrl);
 636       uncommon_trap(Deoptimization::Reason_class_check,
 637                     Deoptimization::Action_none);
 638     }
 639     if (stopped()) {
 640       return; // MUST uncommon-trap?
 641     }
 642     set_stack(sp() - nargs, casted_receiver);
 643   }
 644 
 645   // Note:  It's OK to try to inline a virtual call.
 646   // The call generator will not attempt to inline a polymorphic call
 647   // unless it knows how to optimize the receiver dispatch.
 648   bool try_inline = (C->do_inlining() || InlineAccessors);
 649 
 650   // ---------------------
 651   dec_sp(nargs);              // Temporarily pop args for JVM state of call
 652   JVMState* jvms = sync_jvms();
 653 
 654   // ---------------------
 655   // Decide call tactic.
 656   // This call checks with CHA, the interpreter profile, intrinsics table, etc.
 657   // It decides whether inlining is desirable or not.
 658   CallGenerator* cg = C->call_generator(callee, vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type);
 659 
 660   // NOTE:  Don't use orig_callee and callee after this point!  Use cg->method() instead.
 661   orig_callee = callee = nullptr;
 662 
 663   // ---------------------
 664 
 665   // Feed profiling data for arguments to the type system so it can
 666   // propagate it as speculative types
 667   record_profiled_arguments_for_speculation(cg->method(), bc());
 668 
 669 #ifndef PRODUCT
 670   // bump global counters for calls
 671   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 672 
 673   // Record first part of parsing work for this call
 674   parse_histogram()->record_change();
 675 #endif // not PRODUCT
 676 
 677   assert(jvms == this->jvms(), "still operating on the right JVMS");
 678   assert(jvms_in_sync(),       "jvms must carry full info into CG");

 725 
 726   assert(check_call_consistency(jvms, cg), "inconsistent info");
 727 
 728   if (!stopped()) {
 729     // This was some sort of virtual call, which did a null check for us.
 730     // Now we can assert receiver-not-null, on the normal return path.
 731     if (receiver != nullptr && cg->is_virtual()) {
 732       Node* cast = cast_not_null(receiver);
 733       // %%% assert(receiver == cast, "should already have cast the receiver");
 734     }
 735 
 736     ciType* rtype = cg->method()->return_type();
 737     ciType* ctype = declared_signature->return_type();
 738 
 739     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 740       // Be careful here with return types.
 741       if (ctype != rtype) {
 742         BasicType rt = rtype->basic_type();
 743         BasicType ct = ctype->basic_type();
 744         if (ct == T_VOID) {
 745           // It's OK for a method  to return a value that is discarded.
 746           // The discarding does not require any special action from the caller.
 747           // The Java code knows this, at VerifyType.isNullConversion.
 748           pop_node(rt);  // whatever it was, pop it
 749         } else if (rt == T_INT || is_subword_type(rt)) {
 750           // Nothing.  These cases are handled in lambda form bytecode.
 751           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 752         } else if (is_reference_type(rt)) {
 753           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 754           if (ctype->is_loaded()) {
 755             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 756             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 757             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 758               Node* retnode = pop();
 759               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 760               push(cast_obj);
 761             }
 762           }
 763         } else {
 764           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 765           // push a zero; it's better than getting an oop/int mismatch

 783     // If the return type of the method is not loaded, assert that the
 784     // value we got is null.  Otherwise, we need to recompile.
 785     if (!rtype->is_loaded()) {
 786       if (PrintOpto && (Verbose || WizardMode)) {
 787         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 788         cg->method()->print_name(); tty->cr();
 789       }
 790       if (C->log() != nullptr) {
 791         C->log()->elem("assert_null reason='return' klass='%d'",
 792                        C->log()->identify(rtype));
 793       }
 794       // If there is going to be a trap, put it at the next bytecode:
 795       set_bci(iter().next_bci());
 796       null_assert(peek());
 797       set_bci(iter().cur_bci()); // put it back
 798     }
 799     BasicType ct = ctype->basic_type();
 800     if (is_reference_type(ct)) {
 801       record_profiled_return_for_speculation();
 802     }
 803   }
 804 
 805   // Restart record of parsing work after possible inlining of call
 806 #ifndef PRODUCT
 807   parse_histogram()->set_initial_state(bc());
 808 #endif
 809 }
 810 
 811 //---------------------------catch_call_exceptions-----------------------------
 812 // Put Catch and CatchProj nodes behind a just-created call.
 813 // Send their caught exceptions to the proper handler.
 814 // This may be used after a call to the rethrow VM stub,
 815 // when it is needed to process unloaded exception classes.
 816 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 817   // Exceptions are delivered through this channel:
 818   Node* i_o = this->i_o();
 819 
 820   // Add a CatchNode.
 821   Arena tmp_mem{mtCompiler};
 822   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);

   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "ci/ciCallSite.hpp"
  26 #include "ci/ciMethodHandle.hpp"
  27 #include "ci/ciSymbols.hpp"
  28 #include "classfile/vmIntrinsics.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "interpreter/linkResolver.hpp"
  33 #include "jvm_io.h"
  34 #include "logging/log.hpp"
  35 #include "logging/logLevel.hpp"
  36 #include "logging/logMessage.hpp"
  37 #include "logging/logStream.hpp"
  38 #include "opto/addnode.hpp"
  39 #include "opto/callGenerator.hpp"
  40 #include "opto/castnode.hpp"
  41 #include "opto/cfgnode.hpp"
  42 #include "opto/inlinetypenode.hpp"
  43 #include "opto/mulnode.hpp"
  44 #include "opto/parse.hpp"
  45 #include "opto/rootnode.hpp"
  46 #include "opto/runtime.hpp"
  47 #include "opto/subnode.hpp"
  48 #include "prims/methodHandles.hpp"
  49 #include "runtime/sharedRuntime.hpp"
  50 #include "utilities/macros.hpp"
  51 #if INCLUDE_JFR
  52 #include "jfr/jfr.hpp"
  53 #endif
  54 
  55 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
  56                                      bool with_deco) {
  57   if (with_deco) {
  58     CompileTask::print_inline_indent(depth, out);
  59   }
  60   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  61   prof_klass->name()->print_symbol_on(out);
  62   if (with_deco) {

  72     if (!C->print_inlining()) {
  73       if (!PrintOpto && !PrintCompilation) {
  74         method->print_short_name();
  75         tty->cr();
  76       }
  77       CompileTask::print_inlining_tty(prof_method, depth, bci, InliningResult::SUCCESS);
  78       print_trace_type_profile(tty, depth, prof_klass, site_count, receiver_count, true);
  79     } else {
  80       auto stream = C->inline_printer()->record(method, jvms, InliningResult::SUCCESS);
  81       print_trace_type_profile(stream, depth, prof_klass, site_count, receiver_count, false);
  82     }
  83   }
  84 
  85   LogTarget(Debug, jit, inlining) lt;
  86   if (lt.is_enabled()) {
  87     LogStream ls(lt);
  88     print_trace_type_profile(&ls, depth, prof_klass, site_count, receiver_count, true);
  89   }
  90 }
  91 
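     // Returns true when the argument at arg_idx may be a larval (still under
     // construction) value object: the receiver of a constructor invocation, or
     // the object argument of the Unsafe put*/finishPrivateBuffer intrinsics
     // listed below. Such arguments must not be scalarized prematurely.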
  92 static bool arg_can_be_larval(ciMethod* callee, int arg_idx) {
  93   if (callee->is_object_constructor() && arg_idx == 0) {
  94     return true;
  95   }
  96 
  97   if (arg_idx != 1 || callee->intrinsic_id() == vmIntrinsicID::_none) {
  98     return false;
  99   }
 100 
 101   switch (callee->intrinsic_id()) {
 102     case vmIntrinsicID::_finishPrivateBuffer:
 103     case vmIntrinsicID::_putBoolean:
 104     case vmIntrinsicID::_putBooleanOpaque:
 105     case vmIntrinsicID::_putBooleanRelease:
 106     case vmIntrinsicID::_putBooleanVolatile:
 107     case vmIntrinsicID::_putByte:
 108     case vmIntrinsicID::_putByteOpaque:
 109     case vmIntrinsicID::_putByteRelease:
 110     case vmIntrinsicID::_putByteVolatile:
 111     case vmIntrinsicID::_putChar:
 112     case vmIntrinsicID::_putCharOpaque:
 113     case vmIntrinsicID::_putCharRelease:
 114     case vmIntrinsicID::_putCharUnaligned:
 115     case vmIntrinsicID::_putCharVolatile:
 116     case vmIntrinsicID::_putShort:
 117     case vmIntrinsicID::_putShortOpaque:
 118     case vmIntrinsicID::_putShortRelease:
 119     case vmIntrinsicID::_putShortUnaligned:
 120     case vmIntrinsicID::_putShortVolatile:
 121     case vmIntrinsicID::_putInt:
 122     case vmIntrinsicID::_putIntOpaque:
 123     case vmIntrinsicID::_putIntRelease:
 124     case vmIntrinsicID::_putIntUnaligned:
 125     case vmIntrinsicID::_putIntVolatile:
 126     case vmIntrinsicID::_putLong:
 127     case vmIntrinsicID::_putLongOpaque:
 128     case vmIntrinsicID::_putLongRelease:
 129     case vmIntrinsicID::_putLongUnaligned:
 130     case vmIntrinsicID::_putLongVolatile:
 131     case vmIntrinsicID::_putFloat:
 132     case vmIntrinsicID::_putFloatOpaque:
 133     case vmIntrinsicID::_putFloatRelease:
 134     case vmIntrinsicID::_putFloatVolatile:
 135     case vmIntrinsicID::_putDouble:
 136     case vmIntrinsicID::_putDoubleOpaque:
 137     case vmIntrinsicID::_putDoubleRelease:
 138     case vmIntrinsicID::_putDoubleVolatile:
 139     case vmIntrinsicID::_putReference:
 140     case vmIntrinsicID::_putReferenceOpaque:
 141     case vmIntrinsicID::_putReferenceRelease:
 142     case vmIntrinsicID::_putReferenceVolatile:
 143       return true;
 144     default:
 145       return false;
 146   }
 147 }
 148 
 149 CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool call_does_dispatch,
 150                                        JVMState* jvms, bool allow_inline,
 151                                        float prof_factor, ciKlass* speculative_receiver_type,
 152                                        bool allow_intrinsics) {
 153   assert(callee != nullptr, "failed method resolution");
 154 
 155   ciMethod*       caller      = jvms->method();
 156   int             bci         = jvms->bci();
 157   Bytecodes::Code bytecode    = caller->java_code_at_bci(bci);
 158   ciMethod*       orig_callee = caller->get_method_at_bci(bci);
 159 
 160   const bool is_virtual = (bytecode == Bytecodes::_invokevirtual) || (orig_callee->intrinsic_id() == vmIntrinsics::_linkToVirtual);
 161   const bool is_interface = (bytecode == Bytecodes::_invokeinterface) || (orig_callee->intrinsic_id() == vmIntrinsics::_linkToInterface);
 162   const bool is_virtual_or_interface = is_virtual || is_interface;
 163 
 164   const bool check_access = !orig_callee->is_method_handle_intrinsic(); // method handle intrinsics don't perform access checks
 165 
 166   // Dtrace currently doesn't work unless all calls are vanilla
 167   if (env()->dtrace_method_probes()) {
 168     allow_inline = false;

 189     log->begin_elem("call method='%d' count='%d' prof_factor='%f'",
 190                     log->identify(callee), site_count, prof_factor);
 191     if (call_does_dispatch)  log->print(" virtual='1'");
 192     if (allow_inline)     log->print(" inline='1'");
 193     if (receiver_count >= 0) {
 194       log->print(" receiver='%d' receiver_count='%d'", rid, receiver_count);
 195       if (profile.has_receiver(1)) {
 196         log->print(" receiver2='%d' receiver2_count='%d'", r2id, profile.receiver_count(1));
 197       }
 198     }
 199     if (callee->is_method_handle_intrinsic()) {
 200       log->print(" method_handle_intrinsic='1'");
 201     }
 202     log->end_elem();
 203   }
 204 
 205   // Special case the handling of certain common, profitable library
 206   // methods.  If these methods are replaced with specialized code,
 207   // then we return it as the inlined version of the call.
 208   CallGenerator* cg_intrinsic = nullptr;
 209   if (callee->intrinsic_id() == vmIntrinsics::_makePrivateBuffer || callee->intrinsic_id() == vmIntrinsics::_finishPrivateBuffer) {
 210     // These methods must be inlined so that we don't have larval value objects crossing method
 211     // boundaries
 212     assert(!call_does_dispatch, "callee should not be virtual %s", callee->name()->as_utf8());
 213     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 214 
 215     if (cg == nullptr) {
 216       // This is probably because the intrinsic is disabled from the command line
 217       char reason[256];
 218       jio_snprintf(reason, sizeof(reason), "cannot find an intrinsic for %s", callee->name()->as_utf8());
 219       C->record_method_not_compilable(reason);
 220       return nullptr;
 221     }
 222     return cg;
 223   } else if (allow_inline && allow_intrinsics) {
 224     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
 225     if (cg != nullptr) {
 226       if (cg->is_predicated()) {
 227         // Code without intrinsic but, hopefully, inlined.
 228         CallGenerator* inline_cg = this->call_generator(callee,
 229               vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, speculative_receiver_type, false);
 230         if (inline_cg != nullptr) {
 231           cg = CallGenerator::for_predicated_intrinsic(cg, inline_cg);
 232         }
 233       }
 234 
 235       // If the intrinsic does the virtual dispatch, we try to use the type profile
 236       // first, and hopefully inline it as the regular virtual call below.
 237       // We will retry the intrinsic if nothing else has claimed it by then.
 238       if (cg->does_virtual_dispatch()) {
 239         cg_intrinsic = cg;
 240         cg = nullptr;
 241       } else if (IncrementalInline && should_delay_vector_inlining(callee, jvms)) {
 242         return CallGenerator::for_late_inline(callee, cg);
 243       } else {

 671   // Speculative type of the receiver if any
 672   ciKlass* speculative_receiver_type = nullptr;
 673   if (is_virtual_or_interface) {
 674     Node* receiver_node             = stack(sp() - nargs);
 675     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 676     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 677     // For arrays, klass below is Object. When vtable calls are used,
 678     // resolving the call with Object would allow an illegal call to
 679     // finalize() on an array. We use holder instead: illegal calls to
 680     // finalize() won't be compiled as vtable calls (IC call
 681     // resolution will catch the illegal call) and the few legal calls
 682     // on array types won't be either.
 683     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 684                                       receiver_type, is_virtual,
 685                                       call_does_dispatch, vtable_index);  // out-parameters
 686     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 687   }
 688 
 689   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 690   ciKlass* receiver_constraint = nullptr;
 691   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_constructor()) {
 692     ciInstanceKlass* calling_klass = method()->holder();
 693     ciInstanceKlass* sender_klass = calling_klass;
 694     if (sender_klass->is_interface()) {
 695       receiver_constraint = sender_klass;
 696     }
 697   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 698     assert(holder->is_interface(), "How did we get a non-interface method here!");
 699     receiver_constraint = holder;
 700   }
 701 
 702   if (receiver_constraint != nullptr) {
 703     Node* receiver_node = stack(sp() - nargs);
 704     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 705     Node* bad_type_ctrl = nullptr;
 706     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 707     if (bad_type_ctrl != nullptr) {
 708       PreserveJVMState pjvms(this);
 709       set_control(bad_type_ctrl);
 710       uncommon_trap(Deoptimization::Reason_class_check,
 711                     Deoptimization::Action_none);
 712     }
 713     if (stopped()) {
 714       return; // MUST uncommon-trap?
 715     }
 716     set_stack(sp() - nargs, casted_receiver);
 717   }
 718 
 719   // Scalarize value objects passed into this invocation if we know that they are not larval
 720   for (int arg_idx = 0; arg_idx < nargs; arg_idx++) {
 721     if (arg_can_be_larval(callee, arg_idx)) {
 722       continue;
 723     }
 724 
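         // Argument 0 is deepest on the expression stack, so argument arg_idx
         // sits (nargs - 1 - arg_idx) slots below the top.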
 725     cast_to_non_larval(peek(nargs - 1 - arg_idx));
 726   }
 727 
 728   // Note:  It's OK to try to inline a virtual call.
 729   // The call generator will not attempt to inline a polymorphic call
 730   // unless it knows how to optimize the receiver dispatch.
 731   bool try_inline = (C->do_inlining() || InlineAccessors);
 732 
 733   // ---------------------
 734   dec_sp(nargs);              // Temporarily pop args for JVM state of call
 735   JVMState* jvms = sync_jvms();
 736 
 737   // ---------------------
 738   // Decide call tactic.
 739   // This call checks with CHA, the interpreter profile, intrinsics table, etc.
 740   // It decides whether inlining is desirable or not.
 741   CallGenerator* cg = C->call_generator(callee, vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type);
 742   if (failing()) {
 743     return;
 744   }
 745   assert(cg != nullptr, "must find a CallGenerator for callee %s", callee->name()->as_utf8());
 746 
 747   // NOTE:  Don't use orig_callee and callee after this point!  Use cg->method() instead.
 748   orig_callee = callee = nullptr;
 749 
 750   // ---------------------
 751 
 752   // Feed profiling data for arguments to the type system so it can
 753   // propagate it as speculative types
 754   record_profiled_arguments_for_speculation(cg->method(), bc());
 755 
 756 #ifndef PRODUCT
 757   // bump global counters for calls
 758   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 759 
 760   // Record first part of parsing work for this call
 761   parse_histogram()->record_change();
 762 #endif // not PRODUCT
 763 
 764   assert(jvms == this->jvms(), "still operating on the right JVMS");
 765   assert(jvms_in_sync(),       "jvms must carry full info into CG");

 812 
 813   assert(check_call_consistency(jvms, cg), "inconsistent info");
 814 
 815   if (!stopped()) {
 816     // This was some sort of virtual call, which did a null check for us.
 817     // Now we can assert receiver-not-null, on the normal return path.
 818     if (receiver != nullptr && cg->is_virtual()) {
 819       Node* cast = cast_not_null(receiver);
 820       // %%% assert(receiver == cast, "should already have cast the receiver");
 821     }
 822 
 823     ciType* rtype = cg->method()->return_type();
 824     ciType* ctype = declared_signature->return_type();
 825 
 826     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 827       // Be careful here with return types.
 828       if (ctype != rtype) {
 829         BasicType rt = rtype->basic_type();
 830         BasicType ct = ctype->basic_type();
 831         if (ct == T_VOID) {
 832           // It's OK for a method to return a value that is discarded.
 833           // The discarding does not require any special action from the caller.
 834           // The Java code knows this, at VerifyType.isNullConversion.
 835           pop_node(rt);  // whatever it was, pop it
 836         } else if (rt == T_INT || is_subword_type(rt)) {
 837           // Nothing.  These cases are handled in lambda form bytecode.
 838           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 839         } else if (is_reference_type(rt)) {
 840           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 841           if (ctype->is_loaded()) {
 842             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 843             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 844             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 845               Node* retnode = pop();
 846               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 847               push(cast_obj);
 848             }
 849           }
 850         } else {
 851           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 852           // push a zero; it's better than getting an oop/int mismatch

 870     // If the return type of the method is not loaded, assert that the
 871     // value we got is null.  Otherwise, we need to recompile.
 872     if (!rtype->is_loaded()) {
 873       if (PrintOpto && (Verbose || WizardMode)) {
 874         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 875         cg->method()->print_name(); tty->cr();
 876       }
 877       if (C->log() != nullptr) {
 878         C->log()->elem("assert_null reason='return' klass='%d'",
 879                        C->log()->identify(rtype));
 880       }
 881       // If there is going to be a trap, put it at the next bytecode:
 882       set_bci(iter().next_bci());
 883       null_assert(peek());
 884       set_bci(iter().cur_bci()); // put it back
 885     }
 886     BasicType ct = ctype->basic_type();
 887     if (is_reference_type(ct)) {
 888       record_profiled_return_for_speculation();
 889     }
 890 
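         // A value-object return arrives as an oop; re-wrap it as an
         // InlineTypeNode so later code can scalarize it. makePrivateBuffer is
         // excluded because its result is still a larval buffer.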
 891     if (!rtype->is_void() && cg->method()->intrinsic_id() != vmIntrinsicID::_makePrivateBuffer) {
 892       Node* retnode = peek();
 893       const Type* rettype = gvn().type(retnode);
 894       if (rettype->is_inlinetypeptr() && !retnode->is_InlineType()) {
 895         retnode = InlineTypeNode::make_from_oop(this, retnode, rettype->inline_klass());
 896         dec_sp(1);
 897         push(retnode);
 898       }
 899     }
 900 
 901     if (cg->method()->is_object_constructor() && receiver != nullptr && gvn().type(receiver)->is_inlinetypeptr()) {
 902       InlineTypeNode* non_larval = InlineTypeNode::make_from_oop(this, receiver, gvn().type(receiver)->inline_klass());
 903       // Relinquish the oop input, we will delay the allocation to the point it is needed, see the
 904       // comments in InlineTypeNode::Ideal for more details
 905       non_larval = non_larval->clone_if_required(&gvn(), nullptr);
 906       non_larval->set_oop(gvn(), null());
 907       non_larval->set_is_buffered(gvn(), false);
 908       non_larval = gvn().transform(non_larval)->as_InlineType();
 909       map()->replace_edge(receiver, non_larval);
 910     }
 911   }
 912 
 913   // Restart record of parsing work after possible inlining of call
 914 #ifndef PRODUCT
 915   parse_histogram()->set_initial_state(bc());
 916 #endif
 917 }
 918 
 919 //---------------------------catch_call_exceptions-----------------------------
 920 // Put Catch and CatchProj nodes behind a just-created call.
 921 // Send their caught exceptions to the proper handler.
 922 // This may be used after a call to the rethrow VM stub,
 923 // when it is needed to process unloaded exception classes.
 924 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 925   // Exceptions are delivered through this channel:
 926   Node* i_o = this->i_o();
 927 
 928   // Add a CatchNode.
 929   Arena tmp_mem{mtCompiler};
 930   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);