src/hotspot/share/opto/doCall.cpp

  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciCallSite.hpp"
  27 #include "ci/ciMethodHandle.hpp"
  28 #include "ci/ciSymbols.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "interpreter/linkResolver.hpp"
  33 #include "logging/log.hpp"
  34 #include "logging/logLevel.hpp"
  35 #include "logging/logMessage.hpp"
  36 #include "logging/logStream.hpp"
  37 #include "opto/addnode.hpp"
  38 #include "opto/callGenerator.hpp"
  39 #include "opto/castnode.hpp"
  40 #include "opto/cfgnode.hpp"
  41 #include "opto/mulnode.hpp"
  42 #include "opto/parse.hpp"
  43 #include "opto/rootnode.hpp"
  44 #include "opto/runtime.hpp"
  45 #include "opto/subnode.hpp"
  46 #include "prims/methodHandles.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "utilities/macros.hpp"
  49 #if INCLUDE_JFR
  50 #include "jfr/jfr.hpp"
  51 #endif
  52 
  53 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count) {
  54   CompileTask::print_inline_indent(depth, out);
  55   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  56   prof_klass->name()->print_symbol_on(out);
  57   out->cr();
  58 }
  59 
  60 static void trace_type_profile(Compile* C, ciMethod* method, int depth, int bci, ciMethod* prof_method,

 560   // Push appendix argument (MethodType, CallSite, etc.), if one.
 561   if (iter().has_appendix()) {
 562     ciObject* appendix_arg = iter().get_appendix();
 563     const TypeOopPtr* appendix_arg_type = TypeOopPtr::make_from_constant(appendix_arg, /* require_const= */ true);
 564     Node* appendix_arg_node = _gvn.makecon(appendix_arg_type);
 565     push(appendix_arg_node);
 566   }
 567 
 568   // ---------------------
 569   // Does Class Hierarchy Analysis reveal only a single target of a v-call?
 570   // Then we may inline or make a static call, but become dependent on there being only 1 target.
 571   // Does the call-site type profile reveal only one receiver?
 572   // Then we may introduce a run-time check and inline on the path where it succeeds.
 573   // The other path may uncommon_trap, check for another receiver, or do a v-call.
 574 
 575   // Try to get the most accurate receiver type
 576   ciMethod* callee             = orig_callee;
 577   int       vtable_index       = Method::invalid_vtable_index;
 578   bool      call_does_dispatch = false;
 579 
 580   // Speculative type of the receiver if any
 581   ciKlass* speculative_receiver_type = nullptr;
 582   if (is_virtual_or_interface) {
 583     Node* receiver_node             = stack(sp() - nargs);
 584     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 585     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 586     // For arrays, klass below is Object. When vtable calls are used,
 587     // resolving the call with Object would allow an illegal call to
 588     // finalize() on an array. We use holder instead: illegal calls to
 589     // finalize() won't be compiled as vtable calls (IC call
 590     // resolution will catch the illegal call) and the few legal calls
 591     // on array types won't be either.
 592     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 593                                       receiver_type, is_virtual,
 594                                       call_does_dispatch, vtable_index);  // out-parameters
 595     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 596   }
 597 
 598   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 599   ciKlass* receiver_constraint = nullptr;
 600   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_initializer()) {
 601     ciInstanceKlass* calling_klass = method()->holder();
 602     ciInstanceKlass* sender_klass = calling_klass;
 603     if (sender_klass->is_interface()) {
 604       receiver_constraint = sender_klass;
 605     }
 606   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 607     assert(holder->is_interface(), "How did we get a non-interface method here!");
 608     receiver_constraint = holder;
 609   }
 610 
 611   if (receiver_constraint != nullptr) {
 612     Node* receiver_node = stack(sp() - nargs);
 613     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 614     Node* bad_type_ctrl = nullptr;
 615     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 616     if (bad_type_ctrl != nullptr) {
 617       PreserveJVMState pjvms(this);
 618       set_control(bad_type_ctrl);
 619       uncommon_trap(Deoptimization::Reason_class_check,
 620                     Deoptimization::Action_none);

 710 
 711   assert(check_call_consistency(jvms, cg), "inconsistent info");
 712 
 713   if (!stopped()) {
 714     // This was some sort of virtual call, which did a null check for us.
 715     // Now we can assert receiver-not-null, on the normal return path.
 716     if (receiver != nullptr && cg->is_virtual()) {
 717       Node* cast = cast_not_null(receiver);
 718       // %%% assert(receiver == cast, "should already have cast the receiver");
 719     }
 720 
 721     ciType* rtype = cg->method()->return_type();
 722     ciType* ctype = declared_signature->return_type();
 723 
 724     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 725       // Be careful here with return types.
 726       if (ctype != rtype) {
 727         BasicType rt = rtype->basic_type();
 728         BasicType ct = ctype->basic_type();
 729         if (ct == T_VOID) {
 730           // It's OK for a method  to return a value that is discarded.
 731           // The discarding does not require any special action from the caller.
 732           // The Java code knows this, at VerifyType.isNullConversion.
 733           pop_node(rt);  // whatever it was, pop it
 734         } else if (rt == T_INT || is_subword_type(rt)) {
 735           // Nothing.  These cases are handled in lambda form bytecode.
 736           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 737         } else if (is_reference_type(rt)) {
 738           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 739           if (ctype->is_loaded()) {
 740             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 741             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 742             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 743               Node* retnode = pop();
 744               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 745               push(cast_obj);
 746             }
 747           }
 748         } else {
 749           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 750           // push a zero; it's better than getting an oop/int mismatch

 768     // If the return type of the method is not loaded, assert that the
 769     // value we got is a null.  Otherwise, we need to recompile.
 770     if (!rtype->is_loaded()) {
 771       if (PrintOpto && (Verbose || WizardMode)) {
 772         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 773         cg->method()->print_name(); tty->cr();
 774       }
 775       if (C->log() != nullptr) {
 776         C->log()->elem("assert_null reason='return' klass='%d'",
 777                        C->log()->identify(rtype));
 778       }
 779       // If there is going to be a trap, put it at the next bytecode:
 780       set_bci(iter().next_bci());
 781       null_assert(peek());
 782       set_bci(iter().cur_bci()); // put it back
 783     }
 784     BasicType ct = ctype->basic_type();
 785     if (is_reference_type(ct)) {
 786       record_profiled_return_for_speculation();
 787     }
 788   }
 789 
 790   // Restart record of parsing work after possible inlining of call
 791 #ifndef PRODUCT
 792   parse_histogram()->set_initial_state(bc());
 793 #endif
 794 }
 795 
 796 //---------------------------catch_call_exceptions-----------------------------
 797 // Put a Catch and CatchProj nodes behind a just-created call.
 798 // Send their caught exceptions to the proper handler.
 799 // This may be used after a call to the rethrow VM stub,
 800 // when it is needed to process unloaded exception classes.
 801 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 802   // Exceptions are delivered through this channel:
 803   Node* i_o = this->i_o();
 804 
 805   // Add a CatchNode.
 806   Arena tmp_mem{mtCompiler};
 807   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);

  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciCallSite.hpp"
  27 #include "ci/ciMethodHandle.hpp"
  28 #include "ci/ciSymbols.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "interpreter/linkResolver.hpp"
  33 #include "logging/log.hpp"
  34 #include "logging/logLevel.hpp"
  35 #include "logging/logMessage.hpp"
  36 #include "logging/logStream.hpp"
  37 #include "opto/addnode.hpp"
  38 #include "opto/callGenerator.hpp"
  39 #include "opto/castnode.hpp"
  40 #include "opto/cfgnode.hpp"
  41 #include "opto/inlinetypenode.hpp"
  42 #include "opto/mulnode.hpp"
  43 #include "opto/parse.hpp"
  44 #include "opto/rootnode.hpp"
  45 #include "opto/runtime.hpp"
  46 #include "opto/subnode.hpp"
  47 #include "prims/methodHandles.hpp"
  48 #include "runtime/sharedRuntime.hpp"
  49 #include "utilities/macros.hpp"
  50 #if INCLUDE_JFR
  51 #include "jfr/jfr.hpp"
  52 #endif
  53 
  54 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count) {
  55   CompileTask::print_inline_indent(depth, out);
  56   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  57   prof_klass->name()->print_symbol_on(out);
  58   out->cr();
  59 }
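Review note: for illustration only, with made-up counts and class name, the helper above emits a trace line of the form

     \-> TypeProfile (117/128 counts) = java/lang/String

meaning 117 of the 128 profiled invocations at this call site saw that receiver class.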
  60 
  61 static void trace_type_profile(Compile* C, ciMethod* method, int depth, int bci, ciMethod* prof_method,

 561   // Push appendix argument (MethodType, CallSite, etc.), if one.
 562   if (iter().has_appendix()) {
 563     ciObject* appendix_arg = iter().get_appendix();
 564     const TypeOopPtr* appendix_arg_type = TypeOopPtr::make_from_constant(appendix_arg, /* require_const= */ true);
 565     Node* appendix_arg_node = _gvn.makecon(appendix_arg_type);
 566     push(appendix_arg_node);
 567   }
 568 
 569   // ---------------------
 570   // Does Class Hierarchy Analysis reveal only a single target of a v-call?
 571   // Then we may inline or make a static call, but become dependent on there being only 1 target.
 572   // Does the call-site type profile reveal only one receiver?
 573   // Then we may introduce a run-time check and inline on the path where it succeeds.
 574   // The other path may uncommon_trap, check for another receiver, or do a v-call.
 575 
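Review note: the comment above describes the two devirtualization strategies this code sets up. The sketch below, a standalone C++ analogy with invented Shape/Circle types (not HotSpot code), shows the shape of the profile-guided case: a cheap receiver-class check guards an inlined monomorphic path, and the other path falls back to full virtual dispatch (in compiled code that fallback is typically an uncommon trap or a v-call, as the comment says).

    struct Shape {
      virtual double area() const = 0;
      virtual ~Shape() = default;
    };

    struct Circle : Shape {
      double r;
      explicit Circle(double radius) : r(radius) {}
      double area() const override { return 3.14159265358979 * r * r; }
    };

    // Call site whose profile reports a single receiver class (Circle).
    double profiled_area(const Shape* s) {
      if (const Circle* c = dynamic_cast<const Circle*>(s)) {
        return 3.14159265358979 * c->r * c->r;   // guarded, inlined fast path
      }
      return s->area();                          // fallback: virtual dispatch
    }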
 576   // Try to get the most accurate receiver type
 577   ciMethod* callee             = orig_callee;
 578   int       vtable_index       = Method::invalid_vtable_index;
 579   bool      call_does_dispatch = false;
 580 
 581   // Detect the call to the object or abstract class constructor at the end of a value constructor to know when we are done initializing the larval
 582   if (orig_callee->is_object_constructor() && (orig_callee->holder()->is_abstract() || orig_callee->holder()->is_java_lang_Object()) && stack(sp() - nargs)->is_InlineType()) {
 583     assert(method()->is_object_constructor() && (method()->holder()->is_inlinetype() || method()->holder()->is_abstract()), "Unexpected caller");
 584     InlineTypeNode* receiver = stack(sp() - nargs)->as_InlineType();
 585     // TODO 8325106 re-enable the assert and add the same check for the receiver in the caller map
 586     //assert(receiver->is_larval(), "must be larval");
 587     InlineTypeNode* clone = receiver->clone_if_required(&_gvn, _map);
 588     clone->set_is_larval(false);
 589     clone = _gvn.transform(clone)->as_InlineType();
 590     replace_in_map(receiver, clone);
 591 
 592     if (_caller->has_method()) {
 593       // Get receiver from the caller map and update it in the exit map now that we are done initializing it
 594       Node* receiver_in_caller = _caller->map()->argument(_caller, 0);
 595       assert(receiver_in_caller->bottom_type()->inline_klass() == receiver->bottom_type()->inline_klass(), "Receiver type mismatch");
 596       _exits.map()->replace_edge(receiver_in_caller, clone, &_gvn);
 597     }
 598   }
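Review note: the new block above (lines 581-598) republishes the larval receiver as a non-larval InlineTypeNode once the Object or abstract-class constructor has been reached, and swaps it into the current and caller maps. A loose standalone analogy of that clone-then-replace-uses idiom, with invented types and no HotSpot APIs:

    #include <memory>
    #include <string>
    #include <unordered_map>

    // Invented descriptor type; only the mechanic matters here.
    struct ValueDesc {
      bool larval;   // still under construction?
      int  field;
    };

    using State = std::unordered_map<std::string, std::shared_ptr<const ValueDesc>>;

    // Shared descriptors are treated as immutable, so clearing the flag means
    // cloning the descriptor (cf. clone_if_required) and replacing every use
    // of the old one (cf. replace_in_map / replace_edge above).
    void finish_initialization(State& state, const std::shared_ptr<const ValueDesc>& old_desc) {
      auto done    = std::make_shared<ValueDesc>(*old_desc);
      done->larval = false;
      for (auto& entry : state) {
        if (entry.second == old_desc) {
          entry.second = done;
        }
      }
    }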
 599 
 600   // Speculative type of the receiver if any
 601   ciKlass* speculative_receiver_type = nullptr;
 602   if (is_virtual_or_interface) {
 603     Node* receiver_node             = stack(sp() - nargs);
 604     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 605     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 606     // For arrays, klass below is Object. When vtable calls are used,
 607     // resolving the call with Object would allow an illegal call to
 608     // finalize() on an array. We use holder instead: illegal calls to
 609     // finalize() won't be compiled as vtable calls (IC call
 610     // resolution will catch the illegal call) and the few legal calls
 611     // on array types won't be either.
 612     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 613                                       receiver_type, is_virtual,
 614                                       call_does_dispatch, vtable_index);  // out-parameters
 615     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 616   }
 617 
 618   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 619   ciKlass* receiver_constraint = nullptr;
 620   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_constructor()) {
 621     ciInstanceKlass* calling_klass = method()->holder();
 622     ciInstanceKlass* sender_klass = calling_klass;
 623     if (sender_klass->is_interface()) {
 624       receiver_constraint = sender_klass;
 625     }
 626   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 627     assert(holder->is_interface(), "How did we get a non-interface method here!");
 628     receiver_constraint = holder;
 629   }
 630 
 631   if (receiver_constraint != nullptr) {
 632     Node* receiver_node = stack(sp() - nargs);
 633     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 634     Node* bad_type_ctrl = nullptr;
 635     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 636     if (bad_type_ctrl != nullptr) {
 637       PreserveJVMState pjvms(this);
 638       set_control(bad_type_ctrl);
 639       uncommon_trap(Deoptimization::Reason_class_check,
 640                     Deoptimization::Action_none);

 730 
 731   assert(check_call_consistency(jvms, cg), "inconsistent info");
 732 
 733   if (!stopped()) {
 734     // This was some sort of virtual call, which did a null check for us.
 735     // Now we can assert receiver-not-null, on the normal return path.
 736     if (receiver != nullptr && cg->is_virtual()) {
 737       Node* cast = cast_not_null(receiver);
 738       // %%% assert(receiver == cast, "should already have cast the receiver");
 739     }
 740 
 741     ciType* rtype = cg->method()->return_type();
 742     ciType* ctype = declared_signature->return_type();
 743 
 744     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 745       // Be careful here with return types.
 746       if (ctype != rtype) {
 747         BasicType rt = rtype->basic_type();
 748         BasicType ct = ctype->basic_type();
 749         if (ct == T_VOID) {
 750           // It's OK for a method to return a value that is discarded.
 751           // The discarding does not require any special action from the caller.
 752           // The Java code knows this, at VerifyType.isNullConversion.
 753           pop_node(rt);  // whatever it was, pop it
 754         } else if (rt == T_INT || is_subword_type(rt)) {
 755           // Nothing.  These cases are handled in lambda form bytecode.
 756           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 757         } else if (is_reference_type(rt)) {
 758           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 759           if (ctype->is_loaded()) {
 760             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 761             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 762             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 763               Node* retnode = pop();
 764               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 765               push(cast_obj);
 766             }
 767           }
 768         } else {
 769           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 770           // push a zero; it's better than getting an oop/int mismatch

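Review note: in the reference-typed branch above (lines 757-767), the callee's actual return type (rtype) can be broader than the type the call site's signature declares (ctype), which is common for appendix-carrying and signature-polymorphic calls, so a CheckCastPP narrows the returned oop back to the declared type. A standalone C++ analogy with invented Base/Derived types (not HotSpot code):

    #include <cassert>

    struct Base    { virtual ~Base() = default; };
    struct Derived : Base { int payload = 42; };

    // The "erased" callee returns the broader type (rtype)...
    Base* erased_callee() {
      static Derived d;
      return &d;
    }

    // ...while the call site was declared to produce the narrower type (ctype),
    // so a checked downcast (cf. CheckCastPPNode) restores the declared type.
    Derived* call_site() {
      Derived* narrowed = dynamic_cast<Derived*>(erased_callee());
      assert(narrowed != nullptr);
      return narrowed;
    }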
 788     // If the return type of the method is not loaded, assert that the
 789     // value we got is a null.  Otherwise, we need to recompile.
 790     if (!rtype->is_loaded()) {
 791       if (PrintOpto && (Verbose || WizardMode)) {
 792         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 793         cg->method()->print_name(); tty->cr();
 794       }
 795       if (C->log() != nullptr) {
 796         C->log()->elem("assert_null reason='return' klass='%d'",
 797                        C->log()->identify(rtype));
 798       }
 799       // If there is going to be a trap, put it at the next bytecode:
 800       set_bci(iter().next_bci());
 801       null_assert(peek());
 802       set_bci(iter().cur_bci()); // put it back
 803     }
 804     BasicType ct = ctype->basic_type();
 805     if (is_reference_type(ct)) {
 806       record_profiled_return_for_speculation();
 807     }
 808     if (rtype->is_inlinetype() && !peek()->is_InlineType()) {
 809       Node* retnode = pop();
 810       retnode = InlineTypeNode::make_from_oop(this, retnode, rtype->as_inline_klass(), !gvn().type(retnode)->maybe_null());
 811       push_node(T_OBJECT, retnode);
 812     }
 813 
 814     // Did we inline a value class constructor from another value class constructor?
 815     if (_caller->has_method() && cg->is_inline() && cg->method()->is_object_constructor() && cg->method()->holder()->is_inlinetype() &&
 816         _method->is_object_constructor() && _method->holder()->is_inlinetype() && receiver == _caller->map()->argument(_caller, 0)) {
 817       // Update the receiver in the exit map because the constructor call updated it.
 818       // MethodLiveness::BasicBlock::compute_gen_kill_single ensures that the receiver in local(0) is still live.
 819       assert(local(0)->is_InlineType(), "Unexpected receiver");
 820       assert(receiver->bottom_type()->inline_klass() == local(0)->bottom_type()->inline_klass(), "Receiver type mismatch");
 821       _exits.map()->replace_edge(receiver, local(0), &_gvn);
 822     }
 823   }
 824 
 825   // Restart record of parsing work after possible inlining of call
 826 #ifndef PRODUCT
 827   parse_histogram()->set_initial_state(bc());
 828 #endif
 829 }
 830 
 831 //---------------------------catch_call_exceptions-----------------------------
 832 // Put a Catch and CatchProj nodes behind a just-created call.
 833 // Send their caught exceptions to the proper handler.
 834 // This may be used after a call to the rethrow VM stub,
 835 // when it is needed to process unloaded exception classes.
 836 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 837   // Exceptions are delivered through this channel:
 838   Node* i_o = this->i_o();
 839 
 840   // Add a CatchNode.
 841   Arena tmp_mem{mtCompiler};
 842   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);