src/hotspot/share/opto/doCall.cpp (old version; the patched version follows below)
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciCallSite.hpp"
  27 #include "ci/ciMethodHandle.hpp"
  28 #include "ci/ciSymbols.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "interpreter/linkResolver.hpp"
  33 #include "opto/addnode.hpp"
  34 #include "opto/callGenerator.hpp"
  35 #include "opto/castnode.hpp"
  36 #include "opto/cfgnode.hpp"
  37 #include "opto/mulnode.hpp"
  38 #include "opto/parse.hpp"
  39 #include "opto/rootnode.hpp"
  40 #include "opto/runtime.hpp"
  41 #include "opto/subnode.hpp"
  42 #include "prims/methodHandles.hpp"
  43 #include "runtime/sharedRuntime.hpp"
  44 
  45 void trace_type_profile(Compile* C, ciMethod *method, int depth, int bci, ciMethod *prof_method, ciKlass *prof_klass, int site_count, int receiver_count) {
  46   if (TraceTypeProfile || C->print_inlining()) {
  47     outputStream* out = tty;
  48     if (!C->print_inlining()) {
  49       if (!PrintOpto && !PrintCompilation) {
  50         method->print_short_name();
  51         tty->cr();
  52       }
  53       CompileTask::print_inlining_tty(prof_method, depth, bci);
  54     } else {
  55       out = C->print_inlining_stream();
  56     }

 540     Node* appendix_arg_node = _gvn.makecon(appendix_arg_type);
 541     push(appendix_arg_node);
 542   }
 543 
 544   // ---------------------
 545   // Does Class Hierarchy Analysis reveal only a single target of a v-call?
 546   // Then we may inline or make a static call, but become dependent on there being only 1 target.
 547   // Does the call-site type profile reveal only one receiver?
 548   // Then we may introduce a run-time check and inline on the path where it succeeds.
 549   // The other path may uncommon_trap, check for another receiver, or do a v-call.
 550 
 551   // Try to get the most accurate receiver type
 552   ciMethod* callee             = orig_callee;
 553   int       vtable_index       = Method::invalid_vtable_index;
 554   bool      call_does_dispatch = false;
 555 
 556   // Speculative type of the receiver if any
 557   ciKlass* speculative_receiver_type = NULL;
 558   if (is_virtual_or_interface) {
 559     Node* receiver_node             = stack(sp() - nargs);
 560     const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 561     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 562     // For arrays, klass below is Object. When vtable calls are used,
 563     // resolving the call with Object would allow an illegal call to
 564     // finalize() on an array. We use holder instead: illegal calls to
 565     // finalize() won't be compiled as vtable calls (IC call
 566     // resolution will catch the illegal call) and the few legal calls
 567     // on array types won't be either.
 568     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 569                                       receiver_type, is_virtual,
 570                                       call_does_dispatch, vtable_index);  // out-parameters
 571     speculative_receiver_type = receiver_type != NULL ? receiver_type->speculative_type() : NULL;
 572   }
 573 
 574   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 575   ciKlass* receiver_constraint = NULL;
 576   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_initializer()) {
 577     ciInstanceKlass* calling_klass = method()->holder();
 578     ciInstanceKlass* sender_klass = calling_klass;
 579     if (sender_klass->is_interface()) {
 580       receiver_constraint = sender_klass;
 581     }
 582   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 583     assert(holder->is_interface(), "How did we get a non-interface method here!");
 584     receiver_constraint = holder;
 585   }
 586 
 587   if (receiver_constraint != NULL) {
 588     Node* receiver_node = stack(sp() - nargs);
 589     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint));
 590     Node* bad_type_ctrl = NULL;
 591     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 592     if (bad_type_ctrl != NULL) {
 593       PreserveJVMState pjvms(this);
 594       set_control(bad_type_ctrl);
 595       uncommon_trap(Deoptimization::Reason_class_check,
 596                     Deoptimization::Action_none);

 625 
 626   // Feed profiling data for arguments to the type system so it can
 627   // propagate it as speculative types
 628   record_profiled_arguments_for_speculation(cg->method(), bc());
 629 
 630 #ifndef PRODUCT
 631   // bump global counters for calls
 632   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 633 
 634   // Record first part of parsing work for this call
 635   parse_histogram()->record_change();
 636 #endif // not PRODUCT
 637 
 638   assert(jvms == this->jvms(), "still operating on the right JVMS");
 639   assert(jvms_in_sync(),       "jvms must carry full info into CG");
 640 
 641   // save across call, for a subsequent cast_not_null.
 642   Node* receiver = has_receiver ? argument(0) : NULL;
 643 
 644   // The extra CheckCastPPs for speculative types mess with PhaseStringOpts
 645   if (receiver != NULL && !call_does_dispatch && !cg->is_string_late_inline()) {
 646     // Feed profiling data for a single receiver to the type system so
 647     // it can propagate it as a speculative type
 648     receiver = record_profiled_receiver_for_speculation(receiver);
 649   }
 650 
 651   JVMState* new_jvms = cg->generate(jvms);
 652   if (new_jvms == NULL) {
 653     // When inlining attempt fails (e.g., too many arguments),
 654     // it may contaminate the current compile state, making it
 655     // impossible to pull back and try again.  Once we call
 656     // cg->generate(), we are committed.  If it fails, the whole
 657     // compilation task is compromised.
 658     if (failing())  return;
 659 
 660     // This can happen if a library intrinsic is available, but refuses
 661     // the call site, perhaps because it did not match a pattern the
 662     // intrinsic was expecting to optimize. Should always be possible to
 663     // get a normal java call that may inline in that case
 664     cg = C->call_generator(cg->method(), vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type, /* allow_intrinsics= */ false);
 665     new_jvms = cg->generate(jvms);

 686 
 687   assert(check_call_consistency(jvms, cg), "inconsistent info");
 688 
 689   if (!stopped()) {
 690     // This was some sort of virtual call, which did a null check for us.
 691     // Now we can assert receiver-not-null, on the normal return path.
 692     if (receiver != NULL && cg->is_virtual()) {
 693       Node* cast = cast_not_null(receiver);
 694       // %%% assert(receiver == cast, "should already have cast the receiver");
 695     }
 696 
 697     ciType* rtype = cg->method()->return_type();
 698     ciType* ctype = declared_signature->return_type();
 699 
 700     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 701       // Be careful here with return types.
 702       if (ctype != rtype) {
 703         BasicType rt = rtype->basic_type();
 704         BasicType ct = ctype->basic_type();
 705         if (ct == T_VOID) {
 706           // It's OK for a method  to return a value that is discarded.
 707           // The discarding does not require any special action from the caller.
 708           // The Java code knows this, at VerifyType.isNullConversion.
 709           pop_node(rt);  // whatever it was, pop it
 710         } else if (rt == T_INT || is_subword_type(rt)) {
 711           // Nothing.  These cases are handled in lambda form bytecode.
 712           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 713         } else if (is_reference_type(rt)) {
 714           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 715           if (ctype->is_loaded()) {
 716             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 717             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 718             if (arg_type != NULL && !arg_type->higher_equal(sig_type)) {
 719               Node* retnode = pop();
 720               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 721               push(cast_obj);
 722             }
 723           }
 724         } else {
 725           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 726           // push a zero; it's better than getting an oop/int mismatch
 727           pop_node(rt);
 728           Node* retnode = zerocon(ct);
 729           push_node(ct, retnode);
 730         }
 731         // Now that the value is well-behaved, continue with the call-site type.
 732         rtype = ctype;
 733       }
 734     } else {
 735       // Symbolic resolution enforces the types to be the same.
 736       // NOTE: We must relax the assert for unloaded types because two
 737       // different ciType instances of the same unloaded class type
 738       // can appear to be "loaded" by different loaders (depending on
 739       // the accessing class).
 740       assert(!rtype->is_loaded() || !ctype->is_loaded() || rtype == ctype,
 741              "mismatched return types: rtype=%s, ctype=%s", rtype->name(), ctype->name());
 742     }
 743 
 744     // If the return type of the method is not loaded, assert that the
 745     // value we got is a null.  Otherwise, we need to recompile.
 746     if (!rtype->is_loaded()) {
 747       if (PrintOpto && (Verbose || WizardMode)) {
 748         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 749         cg->method()->print_name(); tty->cr();
 750       }
 751       if (C->log() != NULL) {
 752         C->log()->elem("assert_null reason='return' klass='%d'",
 753                        C->log()->identify(rtype));
 754       }
 755       // If there is going to be a trap, put it at the next bytecode:
 756       set_bci(iter().next_bci());
 757       null_assert(peek());
 758       set_bci(iter().cur_bci()); // put it back
 759     }
 760     BasicType ct = ctype->basic_type();
 761     if (is_reference_type(ct)) {
 762       record_profiled_return_for_speculation();
 763     }

src/hotspot/share/opto/doCall.cpp (patched version)

  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciCallSite.hpp"
  27 #include "ci/ciMethodHandle.hpp"
  28 #include "ci/ciSymbols.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "compiler/compileBroker.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "interpreter/linkResolver.hpp"
  33 #include "opto/addnode.hpp"
  34 #include "opto/callGenerator.hpp"
  35 #include "opto/castnode.hpp"
  36 #include "opto/cfgnode.hpp"
  37 #include "opto/inlinetypenode.hpp"
  38 #include "opto/mulnode.hpp"
  39 #include "opto/parse.hpp"
  40 #include "opto/rootnode.hpp"
  41 #include "opto/runtime.hpp"
  42 #include "opto/subnode.hpp"
  43 #include "prims/methodHandles.hpp"
  44 #include "runtime/sharedRuntime.hpp"
  45 
  46 void trace_type_profile(Compile* C, ciMethod *method, int depth, int bci, ciMethod *prof_method, ciKlass *prof_klass, int site_count, int receiver_count) {
  47   if (TraceTypeProfile || C->print_inlining()) {
  48     outputStream* out = tty;
  49     if (!C->print_inlining()) {
  50       if (!PrintOpto && !PrintCompilation) {
  51         method->print_short_name();
  52         tty->cr();
  53       }
  54       CompileTask::print_inlining_tty(prof_method, depth, bci);
  55     } else {
  56       out = C->print_inlining_stream();
  57     }

 541     Node* appendix_arg_node = _gvn.makecon(appendix_arg_type);
 542     push(appendix_arg_node);
 543   }
 544 
 545   // ---------------------
 546   // Does Class Hierarchy Analysis reveal only a single target of a v-call?
 547   // Then we may inline or make a static call, but become dependent on there being only 1 target.
 548   // Does the call-site type profile reveal only one receiver?
 549   // Then we may introduce a run-time check and inline on the path where it succeeds.
 550   // The other path may uncommon_trap, check for another receiver, or do a v-call.
 551 
 552   // Try to get the most accurate receiver type
 553   ciMethod* callee             = orig_callee;
 554   int       vtable_index       = Method::invalid_vtable_index;
 555   bool      call_does_dispatch = false;
 556 
 557   // Speculative type of the receiver if any
 558   ciKlass* speculative_receiver_type = NULL;
 559   if (is_virtual_or_interface) {
 560     Node* receiver_node             = stack(sp() - nargs);
 561     const TypeOopPtr* receiver_type = NULL;
 562     if (receiver_node->is_InlineType()) {
 563       receiver_type = TypeInstPtr::make(TypePtr::NotNull, _gvn.type(receiver_node)->inline_klass());
 564     } else {
 565       receiver_type = _gvn.type(receiver_node)->isa_oopptr();
 566     }
 567     // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 568     // For arrays, klass below is Object. When vtable calls are used,
 569     // resolving the call with Object would allow an illegal call to
 570     // finalize() on an array. We use holder instead: illegal calls to
 571     // finalize() won't be compiled as vtable calls (IC call
 572     // resolution will catch the illegal call) and the few legal calls
 573     // on array types won't be either.
 574     callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
 575                                       receiver_type, is_virtual,
 576                                       call_does_dispatch, vtable_index);  // out-parameters
 577     speculative_receiver_type = receiver_type != NULL ? receiver_type->speculative_type() : NULL;
 578   }
 579 
 580   // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
 581   ciKlass* receiver_constraint = NULL;
 582   if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_constructor()) {
 583     ciInstanceKlass* calling_klass = method()->holder();
 584     ciInstanceKlass* sender_klass = calling_klass;
 585     if (sender_klass->is_interface()) {
 586       receiver_constraint = sender_klass;
 587     }
 588   } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
 589     assert(holder->is_interface(), "How did we get a non-interface method here!");
 590     receiver_constraint = holder;
 591   }
 592 
 593   if (receiver_constraint != NULL) {
 594     Node* receiver_node = stack(sp() - nargs);
 595     Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint));
 596     Node* bad_type_ctrl = NULL;
 597     Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
 598     if (bad_type_ctrl != NULL) {
 599       PreserveJVMState pjvms(this);
 600       set_control(bad_type_ctrl);
 601       uncommon_trap(Deoptimization::Reason_class_check,
 602                     Deoptimization::Action_none);
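[Editor's note] The comment block at file lines 546-550 describes C2's two devirtualization strategies: either CHA proves there is a single call target, or the type profile suggests a single receiver, which is then guarded by a run-time check (the failing path may uncommon_trap, test another receiver, or do a true v-call). The Valhalla hunk above also short-circuits the receiver-type question for InlineType receivers, whose exact non-null klass is known statically. A minimal standalone sketch of the profile-guided variant; this is not C2 code, and all names are hypothetical:

    #include <cstdio>
    #include <typeinfo>

    // Editor's sketch, not C2 code: all class and function names are
    // hypothetical. Models the profile-guided guard described above.
    struct Shape  { virtual ~Shape() {} virtual double area() const = 0; };
    struct Circle : Shape {
      double r;
      explicit Circle(double r) : r(r) {}
      double area() const override { return 3.14159265 * r * r; }
    };
    struct Square : Shape {
      double s;
      explicit Square(double s) : s(s) {}
      double area() const override { return s * s; }
    };

    // The profiled call site conceptually becomes: a cheap receiver type
    // check guards a direct, inlinable call. The miss path here keeps the
    // v-call; C2 would typically emit an uncommon_trap there instead.
    double area_speculated(const Shape* obj) {
      if (typeid(*obj) == typeid(Circle)) {            // run-time receiver check
        const Circle* c = static_cast<const Circle*>(obj);
        return 3.14159265 * c->r * c->r;               // Circle::area() inlined
      }
      return obj->area();                              // fallback: virtual dispatch
    }

    int main() {
      Circle c(2.0);
      Square s(3.0);
      printf("%.2f %.2f\n", area_speculated(&c), area_speculated(&s));
      return 0;
    }
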

 631 
 632   // Feed profiling data for arguments to the type system so it can
 633   // propagate it as speculative types
 634   record_profiled_arguments_for_speculation(cg->method(), bc());
 635 
 636 #ifndef PRODUCT
 637   // bump global counters for calls
 638   count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
 639 
 640   // Record first part of parsing work for this call
 641   parse_histogram()->record_change();
 642 #endif // not PRODUCT
 643 
 644   assert(jvms == this->jvms(), "still operating on the right JVMS");
 645   assert(jvms_in_sync(),       "jvms must carry full info into CG");
 646 
 647   // save across call, for a subsequent cast_not_null.
 648   Node* receiver = has_receiver ? argument(0) : NULL;
 649 
 650   // The extra CheckCastPPs for speculative types mess with PhaseStringOpts
 651   if (receiver != NULL && !receiver->is_InlineType() && !call_does_dispatch && !cg->is_string_late_inline()) {
 652     // Feed profiling data for a single receiver to the type system so
 653     // it can propagate it as a speculative type
 654     receiver = record_profiled_receiver_for_speculation(receiver);
 655   }
 656 
 657   JVMState* new_jvms = cg->generate(jvms);
 658   if (new_jvms == NULL) {
 659     // When inlining attempt fails (e.g., too many arguments),
 660     // it may contaminate the current compile state, making it
 661     // impossible to pull back and try again.  Once we call
 662     // cg->generate(), we are committed.  If it fails, the whole
 663     // compilation task is compromised.
 664     if (failing())  return;
 665 
 666     // This can happen if a library intrinsic is available, but refuses
 667     // the call site, perhaps because it did not match a pattern the
 668     // intrinsic was expecting to optimize. Should always be possible to
 669     // get a normal java call that may inline in that case
 670     cg = C->call_generator(cg->method(), vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type, /* allow_intrinsics= */ false);
 671     new_jvms = cg->generate(jvms);
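[Editor's note] cg->generate() returning NULL above is the contract for an intrinsic that inspects the call site and declines it; per the comments, the parser then requests a plain generator (allow_intrinsics = false) for the same method and expands that instead, since a failed expansion cannot be rolled back. A small sketch of that refuse-then-fall-back shape, with hypothetical names; this is not C2's CallGenerator API:

    #include <cstdio>
    #include <optional>

    // Editor's sketch with hypothetical names. An "intrinsic" generator may
    // refuse a call site it cannot handle, signalled here by returning
    // std::nullopt (a NULL JVMState* in C2).
    std::optional<int> intrinsic_generate(int arg) {
      if (arg < 0) return std::nullopt;  // pattern not matched: refuse the site
      return arg << 1;                   // specialized fast-path expansion
    }

    int plain_call_generate(int arg) {   // ordinary call: always succeeds
      return arg + arg;
    }

    int expand_call(int arg) {
      if (std::optional<int> r = intrinsic_generate(arg)) {
        return *r;
      }
      // Intrinsic refused: retry with a normal call generator for the same
      // site, mirroring call_generator(..., /* allow_intrinsics= */ false).
      return plain_call_generate(arg);
    }

    int main() {
      printf("%d %d\n", expand_call(5), expand_call(-5));
      return 0;
    }
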

 692 
 693   assert(check_call_consistency(jvms, cg), "inconsistent info");
 694 
 695   if (!stopped()) {
 696     // This was some sort of virtual call, which did a null check for us.
 697     // Now we can assert receiver-not-null, on the normal return path.
 698     if (receiver != NULL && cg->is_virtual()) {
 699       Node* cast = cast_not_null(receiver);
 700       // %%% assert(receiver == cast, "should already have cast the receiver");
 701     }
 702 
 703     ciType* rtype = cg->method()->return_type();
 704     ciType* ctype = declared_signature->return_type();
 705 
 706     if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
 707       // Be careful here with return types.
 708       if (ctype != rtype) {
 709         BasicType rt = rtype->basic_type();
 710         BasicType ct = ctype->basic_type();
 711         if (ct == T_VOID) {
 712           // It's OK for a method to return a value that is discarded.
 713           // The discarding does not require any special action from the caller.
 714           // The Java code knows this, at VerifyType.isNullConversion.
 715           pop_node(rt);  // whatever it was, pop it
 716         } else if (rt == T_INT || is_subword_type(rt)) {
 717           // Nothing.  These cases are handled in lambda form bytecode.
 718           assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
 719         } else if (is_reference_type(rt)) {
 720           assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
 721           if (ctype->is_loaded()) {
 722             const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
 723             const Type*       sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
 724             if (declared_signature->returns_null_free_inline_type()) {
 725               sig_type = sig_type->join_speculative(TypePtr::NOTNULL);
 726             }
 727             if (arg_type != NULL && !arg_type->higher_equal(sig_type) && !peek()->is_InlineType()) {
 728               Node* retnode = pop();
 729               Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
 730               push(cast_obj);
 731             }
 732           }
 733         } else {
 734           assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
 735           // push a zero; it's better than getting an oop/int mismatch
 736           pop_node(rt);
 737           Node* retnode = zerocon(ct);
 738           push_node(ct, retnode);
 739         }
 740         // Now that the value is well-behaved, continue with the call-site type.
 741         rtype = ctype;
 742       }
 743     } else {
 744       // Symbolic resolution enforces the types to be the same.
 745       // NOTE: We must relax the assert for unloaded types because two
 746       // different ciType instances of the same unloaded class type
 747       // can appear to be "loaded" by different loaders (depending on
 748       // the accessing class).
 749       assert(!rtype->is_loaded() || !ctype->is_loaded() || rtype == ctype,
 750              "mismatched return types: rtype=%s, ctype=%s", rtype->name(), ctype->name());
 751     }
 752 
 753     if (rtype->basic_type() == T_INLINE_TYPE && !peek()->is_InlineType() && !gvn().type(peek())->maybe_null()) {
 754       Node* retnode = pop();
 755       retnode = InlineTypeNode::make_from_oop(this, retnode, rtype->as_inline_klass());
 756       push_node(T_INLINE_TYPE, retnode);
 757     }
 758 
 759     // If the return type of the method is not loaded, assert that the
 760     // value we got is a null.  Otherwise, we need to recompile.
 761     if (!rtype->is_loaded()) {
 762       if (PrintOpto && (Verbose || WizardMode)) {
 763         method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
 764         cg->method()->print_name(); tty->cr();
 765       }
 766       if (C->log() != NULL) {
 767         C->log()->elem("assert_null reason='return' klass='%d'",
 768                        C->log()->identify(rtype));
 769       }
 770       // If there is going to be a trap, put it at the next bytecode:
 771       set_bci(iter().next_bci());
 772       null_assert(peek());
 773       set_bci(iter().cur_bci()); // put it back
 774     }
 775     BasicType ct = ctype->basic_type();
 776     if (is_reference_type(ct)) {
 777       record_profiled_return_for_speculation();
 778     }
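
[Editor's note] For call sites with an optional appendix, or for signature-polymorphic methods, the callee's actual return type rt and the call site's declared type ct may legitimately differ, and the code above repairs the expression stack: pop the value when the caller discards it (ct == T_VOID), leave subword ints to the lambda-form bytecode, pin a reference to the declared type with a CheckCastPP, and otherwise replace a mismatched primitive with a zero of the declared type. The Valhalla hunk at file lines 753-756 adds one more adaptation: a returned inline-type oop that is known non-null is re-wrapped as an InlineTypeNode so later passes can scalarize it. A toy model of the stack repairs, with hypothetical types; this is not C2's Node machinery:

    #include <cassert>
    #include <cstdio>
    #include <vector>

    // Editor's toy model of the stack repairs above, not C2 IR.
    enum BasicType { T_VOID, T_INT, T_OBJECT };

    struct Slot { BasicType bt; long value; };  // one expression-stack slot

    // rt: what the callee actually returned; ct: what the site declared.
    void adapt_return(std::vector<Slot>& stack, BasicType rt, BasicType ct) {
      if (ct == T_VOID) {
        stack.pop_back();              // caller discards the returned value
      } else if (rt == T_OBJECT && ct == T_OBJECT) {
        // both references: C2 pins the value to the declared type with a
        // CheckCastPP node; nothing to model with plain tagged slots
      } else if (rt != ct) {
        stack.pop_back();              // incompatible primitives: replace
        stack.push_back({ct, 0});      // with a zero of the declared type
      }
    }

    int main() {
      std::vector<Slot> stack;
      stack.push_back({T_INT, 42});
      adapt_return(stack, T_INT, T_VOID);      // returned value is discarded
      assert(stack.empty());
      stack.push_back({T_INT, 7});
      adapt_return(stack, T_INT, T_OBJECT);    // mismatch: zero of ct pushed
      assert(stack.back().bt == T_OBJECT && stack.back().value == 0);
      printf("ok\n");
      return 0;
    }

As in the real code, once the value on the stack conforms, parsing continues with the call-site type (rtype = ctype).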