< prev index next >

src/hotspot/share/opto/doCall.cpp

Print this page

  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciCallSite.hpp"
  27 #include "ci/ciMethodHandle.hpp"
  28 #include "ci/ciSymbols.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "interpreter/linkResolver.hpp"
  33 #include "logging/log.hpp"
  34 #include "logging/logLevel.hpp"
  35 #include "logging/logMessage.hpp"
  36 #include "logging/logStream.hpp"
  37 #include "opto/addnode.hpp"
  38 #include "opto/callGenerator.hpp"
  39 #include "opto/castnode.hpp"
  40 #include "opto/cfgnode.hpp"

  41 #include "opto/mulnode.hpp"
  42 #include "opto/parse.hpp"
  43 #include "opto/rootnode.hpp"
  44 #include "opto/runtime.hpp"
  45 #include "opto/subnode.hpp"
  46 #include "prims/methodHandles.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "utilities/macros.hpp"
  49 #if INCLUDE_JFR
  50 #include "jfr/jfr.hpp"
  51 #endif
  52 
  // Helper for inline-trace output: prints one indented line describing the
  // type profile observed at a call site, in the form
  //   " \-> TypeProfile (<receiver_count>/<site_count> counts) = <klass name>"
  // where receiver_count is the hit count for prof_klass and site_count the
  // total count recorded at the site.
  53 void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count) {
  54   CompileTask::print_inline_indent(depth, out);  // align with the current inlining depth
  55   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  56   prof_klass->name()->print_symbol_on(out);  // append the profiled receiver klass name
  57   out->cr();
  58 }
  59 
  60 void trace_type_profile(Compile* C, ciMethod* method, int depth, int bci, ciMethod* prof_method,

 560   // Push appendix argument (MethodType, CallSite, etc.), if one.
 561   if (iter().has_appendix()) {
 562     ciObject* appendix_arg = iter().get_appendix();
 563     const TypeOopPtr* appendix_arg_type = TypeOopPtr::make_from_constant(appendix_arg, /* require_const= */ true);
 564     Node* appendix_arg_node = _gvn.makecon(appendix_arg_type);
 565     push(appendix_arg_node);
 566   }
 567 
 568   // ---------------------
 569   // Does Class Hierarchy Analysis reveal only a single target of a v-call?
 570   // Then we may inline or make a static call, but become dependent on there being only 1 target.
 571   // Does the call-site type profile reveal only one receiver?
 572   // Then we may introduce a run-time check and inline on the path where it succeeds.
 573   // The other path may uncommon_trap, check for another receiver, or do a v-call.
 574 
 575   // Try to get the most accurate receiver type
 576   ciMethod* callee             = orig_callee;
 577   int       vtable_index       = Method::invalid_vtable_index;
 578   bool      call_does_dispatch = false;
 579 




















 580   // Speculative type of the receiver if any
 581   ciKlass* speculative_receiver_type = nullptr;
 582   if (is_virtual_or_interface) {
 583     Node* receiver_node             = stack(sp() - nargs);
 584     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 585     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 586     // For arrays, klass below is Object. When vtable calls are used,
 587     // resolving the call with Object would allow an illegal call to
 588     // finalize() on an array. We use holder instead: illegal calls to
 589     // finalize() won't be compiled as vtable calls (IC call
 590     // resolution will catch the illegal call) and the few legal calls
 591     // on array types won't be either.
 592     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 593                                       receiver_type, is_virtual,
 594                                       call_does_dispatch, vtable_index);  // out-parameters
 595     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 596   }
 597 
 598   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 599   ciKlass* receiver_constraint = nullptr;
 600   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_initializer()) {
 601     ciInstanceKlass* calling_klass = method()->holder();
 602     ciInstanceKlass* sender_klass = calling_klass;
 603     if (sender_klass->is_interface()) {
 604       receiver_constraint = sender_klass;
 605     }
 606   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 607     assert(holder->is_interface(), "How did we get a non-interface method here!");
 608     receiver_constraint = holder;
 609   }
 610 
 611   if (receiver_constraint != nullptr) {
 612     Node* receiver_node = stack(sp() - nargs);
 613     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 614     Node* bad_type_ctrl = nullptr;
 615     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 616     if (bad_type_ctrl != nullptr) {
 617       PreserveJVMState pjvms(this);
 618       set_control(bad_type_ctrl);
 619       uncommon_trap(Deoptimization::Reason_class_check,
 620                     Deoptimization::Action_none);

 647   // Round double arguments before call
 648   round_double_arguments(cg->method());
 649 
 650   // Feed profiling data for arguments to the type system so it can
 651   // propagate it as speculative types
 652   record_profiled_arguments_for_speculation(cg->method(), bc());
 653 
 654 #ifndef PRODUCT
 655   // bump global counters for calls
 656   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 657 
 658   // Record first part of parsing work for this call
 659   parse_histogram()->record_change();
 660 #endif // not PRODUCT
 661 
 662   assert(jvms == this->jvms(), "still operating on the right JVMS");
 663   assert(jvms_in_sync(),       "jvms must carry full info into CG");
 664 
 665   // save across call, for a subsequent cast_not_null.
 666   Node* receiver = has_receiver ? argument(0) : nullptr;

 667 
 668   // The extra CheckCastPPs for speculative types mess with PhaseStringOpts
 669   if (receiver != nullptr && !call_does_dispatch && !cg->is_string_late_inline()) {
 670     // Feed profiling data for a single receiver to the type system so
 671     // it can propagate it as a speculative type
 672     receiver = record_profiled_receiver_for_speculation(receiver);
 673   }
 674 
 675   JVMState* new_jvms = cg->generate(jvms);
 676   if (new_jvms == nullptr) {
 677     // When inlining attempt fails (e.g., too many arguments),
 678     // it may contaminate the current compile state, making it
 679     // impossible to pull back and try again.  Once we call
 680     // cg->generate(), we are committed.  If it fails, the whole
 681     // compilation task is compromised.
 682     if (failing())  return;
 683 
 684     // This can happen if a library intrinsic is available, but refuses
 685     // the call site, perhaps because it did not match a pattern the
 686     // intrinsic was expecting to optimize. Should always be possible to

 710 
 711   assert(check_call_consistency(jvms, cg), "inconsistent info");
 712 
 713   if (!stopped()) {
 714     // This was some sort of virtual call, which did a null check for us.
 715     // Now we can assert receiver-not-null, on the normal return path.
 716     if (receiver != nullptr && cg->is_virtual()) {
 717       Node* cast = cast_not_null(receiver);
 718       // %%% assert(receiver == cast, "should already have cast the receiver");
 719     }
 720 
 721     ciType* rtype = cg->method()->return_type();
 722     ciType* ctype = declared_signature->return_type();
 723 
 724     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 725       // Be careful here with return types.
 726       if (ctype != rtype) {
 727         BasicType rt = rtype->basic_type();
 728         BasicType ct = ctype->basic_type();
 729         if (ct == T_VOID) {
 730           // It's OK for a method  to return a value that is discarded.
 731           // The discarding does not require any special action from the caller.
 732           // The Java code knows this, at VerifyType.isNullConversion.
 733           pop_node(rt);  // whatever it was, pop it
 734         } else if (rt == T_INT || is_subword_type(rt)) {
 735           // Nothing.  These cases are handled in lambda form bytecode.
 736           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 737         } else if (is_reference_type(rt)) {
 738           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 739           if (ctype->is_loaded()) {
 740             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 741             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 742             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 743               Node* retnode = pop();
 744               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 745               push(cast_obj);
 746             }
 747           }
 748         } else {
 749           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 750           // push a zero; it's better than getting an oop/int mismatch

 768     // If the return type of the method is not loaded, assert that the
 769     // value we got is a null.  Otherwise, we need to recompile.
 770     if (!rtype->is_loaded()) {
 771       if (PrintOpto && (Verbose || WizardMode)) {
 772         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 773         cg->method()->print_name(); tty->cr();
 774       }
 775       if (C->log() != nullptr) {
 776         C->log()->elem("assert_null reason='return' klass='%d'",
 777                        C->log()->identify(rtype));
 778       }
 779       // If there is going to be a trap, put it at the next bytecode:
 780       set_bci(iter().next_bci());
 781       null_assert(peek());
 782       set_bci(iter().cur_bci()); // put it back
 783     }
 784     BasicType ct = ctype->basic_type();
 785     if (is_reference_type(ct)) {
 786       record_profiled_return_for_speculation();
 787     }















 788   }
 789 
 790   // Restart record of parsing work after possible inlining of call
 791 #ifndef PRODUCT
 792   parse_histogram()->set_initial_state(bc());
 793 #endif
 794 }
 795 
 796 //---------------------------catch_call_exceptions-----------------------------
 797 // Put a Catch and CatchProj nodes behind a just-created call.
 798 // Send their caught exceptions to the proper handler.
 799 // This may be used after a call to the rethrow VM stub,
 800 // when it is needed to process unloaded exception classes.
 801 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 802   // Exceptions are delivered through this channel:
 803   Node* i_o = this->i_o();
 804 
 805   // Add a CatchNode.
 806   Arena tmp_mem{mtCompiler};
 807   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);

  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciCallSite.hpp"
  27 #include "ci/ciMethodHandle.hpp"
  28 #include "ci/ciSymbols.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "interpreter/linkResolver.hpp"
  33 #include "logging/log.hpp"
  34 #include "logging/logLevel.hpp"
  35 #include "logging/logMessage.hpp"
  36 #include "logging/logStream.hpp"
  37 #include "opto/addnode.hpp"
  38 #include "opto/callGenerator.hpp"
  39 #include "opto/castnode.hpp"
  40 #include "opto/cfgnode.hpp"
  41 #include "opto/inlinetypenode.hpp"
  42 #include "opto/mulnode.hpp"
  43 #include "opto/parse.hpp"
  44 #include "opto/rootnode.hpp"
  45 #include "opto/runtime.hpp"
  46 #include "opto/subnode.hpp"
  47 #include "prims/methodHandles.hpp"
  48 #include "runtime/sharedRuntime.hpp"
  49 #include "utilities/macros.hpp"
  50 #if INCLUDE_JFR
  51 #include "jfr/jfr.hpp"
  52 #endif
  53 
  // Helper for inline-trace output: prints one indented line describing the
  // type profile observed at a call site, in the form
  //   " \-> TypeProfile (<receiver_count>/<site_count> counts) = <klass name>"
  // where receiver_count is the hit count for prof_klass and site_count the
  // total count recorded at the site.
  54 void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count) {
  55   CompileTask::print_inline_indent(depth, out);  // align with the current inlining depth
  56   out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  57   prof_klass->name()->print_symbol_on(out);  // append the profiled receiver klass name
  58   out->cr();
  59 }
  60 
  61 void trace_type_profile(Compile* C, ciMethod* method, int depth, int bci, ciMethod* prof_method,

 561   // Push appendix argument (MethodType, CallSite, etc.), if one.
 562   if (iter().has_appendix()) {
 563     ciObject* appendix_arg = iter().get_appendix();
 564     const TypeOopPtr* appendix_arg_type = TypeOopPtr::make_from_constant(appendix_arg, /* require_const= */ true);
 565     Node* appendix_arg_node = _gvn.makecon(appendix_arg_type);
 566     push(appendix_arg_node);
 567   }
 568 
 569   // ---------------------
 570   // Does Class Hierarchy Analysis reveal only a single target of a v-call?
 571   // Then we may inline or make a static call, but become dependent on there being only 1 target.
 572   // Does the call-site type profile reveal only one receiver?
 573   // Then we may introduce a run-time check and inline on the path where it succeeds.
 574   // The other path may uncommon_trap, check for another receiver, or do a v-call.
 575 
 576   // Try to get the most accurate receiver type
 577   ciMethod* callee             = orig_callee;
 578   int       vtable_index       = Method::invalid_vtable_index;
 579   bool      call_does_dispatch = false;
 580 
 581   // Detect the call to the object or abstract class constructor at the end of a value constructor to know when we are done initializing the larval
 582   if (orig_callee->is_object_constructor() && (orig_callee->holder()->is_abstract() || orig_callee->holder()->is_java_lang_Object()) && stack(sp() - nargs)->is_InlineType()) {
 583     assert(method()->is_object_constructor() && (method()->holder()->is_inlinetype() || method()->holder()->is_abstract()), "Unexpected caller");
 584     InlineTypeNode* receiver = stack(sp() - nargs)->as_InlineType();
 585     // TODO 8325106 re-enable the assert and add the same check for the receiver in the caller map
 586     //assert(receiver->is_larval(), "must be larval");
 587     InlineTypeNode* clone = receiver->clone_if_required(&_gvn, _map);
 588     clone->set_is_larval(false);
 589     clone = _gvn.transform(clone)->as_InlineType();
 590     replace_in_map(receiver, clone);
 591 
 592     if (_caller->has_method()) {
 593       // Get receiver from the caller map and update it in the exit map now that we are done initializing it
 594       SafePointNode* map = _caller->map();
 595       Node* receiver_in_caller = map->argument(_caller, 0)->as_InlineType();
 596       assert(receiver_in_caller->bottom_type()->inline_klass() == receiver->bottom_type()->inline_klass(), "Receiver type mismatch");
 597       _exits.map()->replace_edge(receiver_in_caller, clone, &_gvn);
 598     }
 599   }
 600 
 601   // Speculative type of the receiver if any
 602   ciKlass* speculative_receiver_type = nullptr;
 603   if (is_virtual_or_interface) {
 604     Node* receiver_node             = stack(sp() - nargs);
 605     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 606     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 607     // For arrays, klass below is Object. When vtable calls are used,
 608     // resolving the call with Object would allow an illegal call to
 609     // finalize() on an array. We use holder instead: illegal calls to
 610     // finalize() won't be compiled as vtable calls (IC call
 611     // resolution will catch the illegal call) and the few legal calls
 612     // on array types won't be either.
 613     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 614                                       receiver_type, is_virtual,
 615                                       call_does_dispatch, vtable_index);  // out-parameters
 616     speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
 617   }
 618 
 619   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 620   ciKlass* receiver_constraint = nullptr;
 621   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_constructor()) {
 622     ciInstanceKlass* calling_klass = method()->holder();
 623     ciInstanceKlass* sender_klass = calling_klass;
 624     if (sender_klass->is_interface()) {
 625       receiver_constraint = sender_klass;
 626     }
 627   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 628     assert(holder->is_interface(), "How did we get a non-interface method here!");
 629     receiver_constraint = holder;
 630   }
 631 
 632   if (receiver_constraint != nullptr) {
 633     Node* receiver_node = stack(sp() - nargs);
 634     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
 635     Node* bad_type_ctrl = nullptr;
 636     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 637     if (bad_type_ctrl != nullptr) {
 638       PreserveJVMState pjvms(this);
 639       set_control(bad_type_ctrl);
 640       uncommon_trap(Deoptimization::Reason_class_check,
 641                     Deoptimization::Action_none);

 668   // Round double arguments before call
 669   round_double_arguments(cg->method());
 670 
 671   // Feed profiling data for arguments to the type system so it can
 672   // propagate it as speculative types
 673   record_profiled_arguments_for_speculation(cg->method(), bc());
 674 
 675 #ifndef PRODUCT
 676   // bump global counters for calls
 677   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 678 
 679   // Record first part of parsing work for this call
 680   parse_histogram()->record_change();
 681 #endif // not PRODUCT
 682 
 683   assert(jvms == this->jvms(), "still operating on the right JVMS");
 684   assert(jvms_in_sync(),       "jvms must carry full info into CG");
 685 
 686   // save across call, for a subsequent cast_not_null.
 687   Node* receiver = has_receiver ? argument(0) : nullptr;
 688   Node* receiver_in_caller = local(0);
 689 
 690   // The extra CheckCastPPs for speculative types mess with PhaseStringOpts
 691   if (receiver != nullptr && !call_does_dispatch && !cg->is_string_late_inline()) {
 692     // Feed profiling data for a single receiver to the type system so
 693     // it can propagate it as a speculative type
 694     receiver = record_profiled_receiver_for_speculation(receiver);
 695   }
 696 
 697   JVMState* new_jvms = cg->generate(jvms);
 698   if (new_jvms == nullptr) {
 699     // When inlining attempt fails (e.g., too many arguments),
 700     // it may contaminate the current compile state, making it
 701     // impossible to pull back and try again.  Once we call
 702     // cg->generate(), we are committed.  If it fails, the whole
 703     // compilation task is compromised.
 704     if (failing())  return;
 705 
 706     // This can happen if a library intrinsic is available, but refuses
 707     // the call site, perhaps because it did not match a pattern the
 708     // intrinsic was expecting to optimize. Should always be possible to

 732 
 733   assert(check_call_consistency(jvms, cg), "inconsistent info");
 734 
 735   if (!stopped()) {
 736     // This was some sort of virtual call, which did a null check for us.
 737     // Now we can assert receiver-not-null, on the normal return path.
 738     if (receiver != nullptr && cg->is_virtual()) {
 739       Node* cast = cast_not_null(receiver);
 740       // %%% assert(receiver == cast, "should already have cast the receiver");
 741     }
 742 
 743     ciType* rtype = cg->method()->return_type();
 744     ciType* ctype = declared_signature->return_type();
 745 
 746     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 747       // Be careful here with return types.
 748       if (ctype != rtype) {
 749         BasicType rt = rtype->basic_type();
 750         BasicType ct = ctype->basic_type();
 751         if (ct == T_VOID) {
 752           // It's OK for a method to return a value that is discarded.
 753           // The discarding does not require any special action from the caller.
 754           // The Java code knows this, at VerifyType.isNullConversion.
 755           pop_node(rt);  // whatever it was, pop it
 756         } else if (rt == T_INT || is_subword_type(rt)) {
 757           // Nothing.  These cases are handled in lambda form bytecode.
 758           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 759         } else if (is_reference_type(rt)) {
 760           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 761           if (ctype->is_loaded()) {
 762             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 763             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 764             if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
 765               Node* retnode = pop();
 766               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 767               push(cast_obj);
 768             }
 769           }
 770         } else {
 771           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 772           // push a zero; it's better than getting an oop/int mismatch

 790     // If the return type of the method is not loaded, assert that the
 791     // value we got is a null.  Otherwise, we need to recompile.
 792     if (!rtype->is_loaded()) {
 793       if (PrintOpto && (Verbose || WizardMode)) {
 794         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 795         cg->method()->print_name(); tty->cr();
 796       }
 797       if (C->log() != nullptr) {
 798         C->log()->elem("assert_null reason='return' klass='%d'",
 799                        C->log()->identify(rtype));
 800       }
 801       // If there is going to be a trap, put it at the next bytecode:
 802       set_bci(iter().next_bci());
 803       null_assert(peek());
 804       set_bci(iter().cur_bci()); // put it back
 805     }
 806     BasicType ct = ctype->basic_type();
 807     if (is_reference_type(ct)) {
 808       record_profiled_return_for_speculation();
 809     }
 810     if (rtype->is_inlinetype() && !peek()->is_InlineType()) {
 811       Node* retnode = pop();
 812       retnode = InlineTypeNode::make_from_oop(this, retnode, rtype->as_inline_klass(), !gvn().type(retnode)->maybe_null());
 813       push_node(T_OBJECT, retnode);
 814     }
 815   }
 816 
 817   // Did we inline a value class constructor from another value class constructor?
 818   if (cg->is_inline() && cg->method()->is_object_constructor() && cg->method()->holder()->is_inlinetype() &&
 819       _method->is_object_constructor() && cg->method()->holder()->is_inlinetype() && receiver_in_caller == receiver) {
 820     // Update the receiver in the exit map because the constructor call updated it.
 821     // MethodLiveness::BasicBlock::compute_gen_kill_single ensures that the receiver in local(0) is live.
 822     assert(local(0)->is_InlineType(), "Unexpected receiver");
 823     assert(receiver->bottom_type()->inline_klass() == local(0)->bottom_type()->inline_klass(), "Receiver type mismatch");
 824     _exits.map()->replace_edge(receiver, local(0), &_gvn);
 825   }
 826 
 827   // Restart record of parsing work after possible inlining of call
 828 #ifndef PRODUCT
 829   parse_histogram()->set_initial_state(bc());
 830 #endif
 831 }
 832 
 833 //---------------------------catch_call_exceptions-----------------------------
 834 // Put a Catch and CatchProj nodes behind a just-created call.
 835 // Send their caught exceptions to the proper handler.
 836 // This may be used after a call to the rethrow VM stub,
 837 // when it is needed to process unloaded exception classes.
 838 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
 839   // Exceptions are delivered through this channel:
 840   Node* i_o = this->i_o();
 841 
 842   // Add a CatchNode.
 843   Arena tmp_mem{mtCompiler};
 844   GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);
< prev index next >