8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "ci/ciCallSite.hpp"
26 #include "ci/ciMethodHandle.hpp"
27 #include "ci/ciSymbols.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "compiler/compileBroker.hpp"
30 #include "compiler/compileLog.hpp"
31 #include "interpreter/linkResolver.hpp"
32 #include "logging/log.hpp"
33 #include "logging/logLevel.hpp"
34 #include "logging/logMessage.hpp"
35 #include "logging/logStream.hpp"
36 #include "opto/addnode.hpp"
37 #include "opto/callGenerator.hpp"
38 #include "opto/castnode.hpp"
39 #include "opto/cfgnode.hpp"
40 #include "opto/graphKit.hpp"
41 #include "opto/mulnode.hpp"
42 #include "opto/parse.hpp"
43 #include "opto/rootnode.hpp"
44 #include "opto/runtime.hpp"
45 #include "opto/subnode.hpp"
46 #include "prims/methodHandles.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "utilities/macros.hpp"
49 #if INCLUDE_JFR
50 #include "jfr/jfr.hpp"
51 #endif
52
// Emits one line of receiver type-profile information used when tracing
// inlining decisions, of the form:
//   " \-> TypeProfile (receiver_count/site_count counts) = <klass-name>"
// When 'with_deco' is true the output is indented to the current inline
// depth and terminated with a newline; when false only the bare text is
// printed so the caller can embed it inside a larger trace message.
53 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
54 bool with_deco) {
55 if (with_deco) {
56 CompileTask::print_inline_indent(depth, out); // indent to the inlining depth
57 }
58 out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
59 prof_klass->name()->print_symbol_on(out); // name of the profiled receiver klass
60 if (with_deco) {
61 out->cr(); // terminate the line only when we also produced the decoration
62 }
63 }
64
65 static void trace_type_profile(Compile* C, ciMethod* method, JVMState* jvms,
66 ciMethod* prof_method, ciKlass* prof_klass, int site_count, int receiver_count) {
67 int depth = jvms->depth() - 1;
68 int bci = jvms->bci();
598 // Speculative type of the receiver if any
599 ciKlass* speculative_receiver_type = nullptr;
600 if (is_virtual_or_interface) {
601 Node* receiver_node = stack(sp() - nargs);
602 const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
603 // call_does_dispatch and vtable_index are out-parameters. They might be changed.
604 // For arrays, klass below is Object. When vtable calls are used,
605 // resolving the call with Object would allow an illegal call to
606 // finalize() on an array. We use holder instead: illegal calls to
607 // finalize() won't be compiled as vtable calls (IC call
608 // resolution will catch the illegal call) and the few legal calls
609 // on array types won't be either.
610 callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
611 receiver_type, is_virtual,
612 call_does_dispatch, vtable_index); // out-parameters
613 speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
614 }
615
616 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
617 ciKlass* receiver_constraint = nullptr;
618 if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_initializer()) {
619 ciInstanceKlass* calling_klass = method()->holder();
620 ciInstanceKlass* sender_klass = calling_klass;
621 if (sender_klass->is_interface()) {
622 receiver_constraint = sender_klass;
623 }
624 } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
625 assert(holder->is_interface(), "How did we get a non-interface method here!");
626 receiver_constraint = holder;
627 }
628
629 if (receiver_constraint != nullptr) {
630 Node* receiver_node = stack(sp() - nargs);
631 Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
632 Node* bad_type_ctrl = nullptr;
633 Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
634 if (bad_type_ctrl != nullptr) {
635 PreserveJVMState pjvms(this);
636 set_control(bad_type_ctrl);
637 uncommon_trap(Deoptimization::Reason_class_check,
638 Deoptimization::Action_none);
639 }
640 if (stopped()) {
641 return; // MUST uncommon-trap?
642 }
643 set_stack(sp() - nargs, casted_receiver);
644 }
645
646 // Note: It's OK to try to inline a virtual call.
647 // The call generator will not attempt to inline a polymorphic call
648 // unless it knows how to optimize the receiver dispatch.
649 bool try_inline = (C->do_inlining() || InlineAccessors);
650
651 // ---------------------
652 dec_sp(nargs); // Temporarily pop args for JVM state of call
653 JVMState* jvms = sync_jvms();
654
655 // ---------------------
656 // Decide call tactic.
657 // This call checks with CHA, the interpreter profile, intrinsics table, etc.
658 // It decides whether inlining is desirable or not.
659 CallGenerator* cg = C->call_generator(callee, vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type);
660
661 // NOTE: Don't use orig_callee and callee after this point! Use cg->method() instead.
662 orig_callee = callee = nullptr;
663
664 // ---------------------
665
666 // Feed profiling data for arguments to the type system so it can
667 // propagate it as speculative types
668 record_profiled_arguments_for_speculation(cg->method(), bc());
669
670 #ifndef PRODUCT
671 // bump global counters for calls
672 count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
673
674 // Record first part of parsing work for this call
675 parse_histogram()->record_change();
676 #endif // not PRODUCT
677
678 assert(jvms == this->jvms(), "still operating on the right JVMS");
679 assert(jvms_in_sync(), "jvms must carry full info into CG");
726
727 assert(check_call_consistency(jvms, cg), "inconsistent info");
728
729 if (!stopped()) {
730 // This was some sort of virtual call, which did a null check for us.
731 // Now we can assert receiver-not-null, on the normal return path.
732 if (receiver != nullptr && cg->is_virtual()) {
733 Node* cast = cast_not_null(receiver);
734 // %%% assert(receiver == cast, "should already have cast the receiver");
735 }
736
737 ciType* rtype = cg->method()->return_type();
738 ciType* ctype = declared_signature->return_type();
739
740 if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
741 // Be careful here with return types.
742 if (ctype != rtype) {
743 BasicType rt = rtype->basic_type();
744 BasicType ct = ctype->basic_type();
745 if (ct == T_VOID) {
746 // It's OK for a method to return a value that is discarded.
747 // The discarding does not require any special action from the caller.
748 // The Java code knows this, at VerifyType.isNullConversion.
749 pop_node(rt); // whatever it was, pop it
750 } else if (rt == T_INT || is_subword_type(rt)) {
751 // Nothing. These cases are handled in lambda form bytecode.
752 assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
753 } else if (is_reference_type(rt)) {
754 assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
755 if (ctype->is_loaded()) {
756 const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
757 const Type* sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
758 if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
759 Node* retnode = pop();
760 Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
761 push(cast_obj);
762 }
763 }
764 } else {
765 assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
766 // push a zero; it's better than getting an oop/int mismatch
784 // If the return type of the method is not loaded, assert that the
785 // value we got is a null. Otherwise, we need to recompile.
786 if (!rtype->is_loaded()) {
787 if (PrintOpto && (Verbose || WizardMode)) {
788 method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
789 cg->method()->print_name(); tty->cr();
790 }
791 if (C->log() != nullptr) {
792 C->log()->elem("assert_null reason='return' klass='%d'",
793 C->log()->identify(rtype));
794 }
795 // If there is going to be a trap, put it at the next bytecode:
796 set_bci(iter().next_bci());
797 null_assert(peek());
798 set_bci(iter().cur_bci()); // put it back
799 }
800 BasicType ct = ctype->basic_type();
801 if (is_reference_type(ct)) {
802 record_profiled_return_for_speculation();
803 }
804 }
805
806 // Restart record of parsing work after possible inlining of call
807 #ifndef PRODUCT
808 parse_histogram()->set_initial_state(bc());
809 #endif
810 }
811
812 //---------------------------catch_call_exceptions-----------------------------
813 // Put a Catch and CatchProj nodes behind a just-created call.
814 // Send their caught exceptions to the proper handler.
815 // This may be used after a call to the rethrow VM stub,
816 // when it is needed to process unloaded exception classes.
817 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
818 // Exceptions are delivered through this channel:
819 Node* i_o = this->i_o();
820
821 // Add a CatchNode.
822 Arena tmp_mem{mtCompiler};
823 GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);
|
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "ci/ciCallSite.hpp"
26 #include "ci/ciMethodHandle.hpp"
27 #include "ci/ciSymbols.hpp"
28 #include "classfile/vmIntrinsics.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "compiler/compileBroker.hpp"
31 #include "compiler/compileLog.hpp"
32 #include "interpreter/linkResolver.hpp"
33 #include "jvm_io.h"
34 #include "logging/log.hpp"
35 #include "logging/logLevel.hpp"
36 #include "logging/logMessage.hpp"
37 #include "logging/logStream.hpp"
38 #include "opto/addnode.hpp"
39 #include "opto/callGenerator.hpp"
40 #include "opto/castnode.hpp"
41 #include "opto/cfgnode.hpp"
42 #include "opto/graphKit.hpp"
43 #include "opto/inlinetypenode.hpp"
44 #include "opto/mulnode.hpp"
45 #include "opto/parse.hpp"
46 #include "opto/rootnode.hpp"
47 #include "opto/runtime.hpp"
48 #include "opto/subnode.hpp"
49 #include "prims/methodHandles.hpp"
50 #include "runtime/sharedRuntime.hpp"
51 #include "utilities/macros.hpp"
52 #include "utilities/ostream.hpp"
53 #if INCLUDE_JFR
54 #include "jfr/jfr.hpp"
55 #endif
56
// Emits one line of receiver type-profile information used when tracing
// inlining decisions, of the form:
//   " \-> TypeProfile (receiver_count/site_count counts) = <klass-name>"
// When 'with_deco' is true the output is indented to the current inline
// depth and terminated with a newline; when false only the bare text is
// printed so the caller can embed it inside a larger trace message.
57 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
58 bool with_deco) {
59 if (with_deco) {
60 CompileTask::print_inline_indent(depth, out); // indent to the inlining depth
61 }
62 out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
63 prof_klass->name()->print_symbol_on(out); // name of the profiled receiver klass
64 if (with_deco) {
65 out->cr(); // terminate the line only when we also produced the decoration
66 }
67 }
68
69 static void trace_type_profile(Compile* C, ciMethod* method, JVMState* jvms,
70 ciMethod* prof_method, ciKlass* prof_klass, int site_count, int receiver_count) {
71 int depth = jvms->depth() - 1;
72 int bci = jvms->bci();
602 // Speculative type of the receiver if any
603 ciKlass* speculative_receiver_type = nullptr;
604 if (is_virtual_or_interface) {
605 Node* receiver_node = stack(sp() - nargs);
606 const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
607 // call_does_dispatch and vtable_index are out-parameters. They might be changed.
608 // For arrays, klass below is Object. When vtable calls are used,
609 // resolving the call with Object would allow an illegal call to
610 // finalize() on an array. We use holder instead: illegal calls to
611 // finalize() won't be compiled as vtable calls (IC call
612 // resolution will catch the illegal call) and the few legal calls
613 // on array types won't be either.
614 callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
615 receiver_type, is_virtual,
616 call_does_dispatch, vtable_index); // out-parameters
617 speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
618 }
619
620 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
621 ciKlass* receiver_constraint = nullptr;
622 if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_constructor()) {
623 ciInstanceKlass* calling_klass = method()->holder();
624 ciInstanceKlass* sender_klass = calling_klass;
625 if (sender_klass->is_interface()) {
626 receiver_constraint = sender_klass;
627 }
628 } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
629 assert(holder->is_interface(), "How did we get a non-interface method here!");
630 receiver_constraint = holder;
631 }
632
633 if (receiver_constraint != nullptr) {
634 Node* receiver_node = stack(sp() - nargs);
635 Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
636 Node* bad_type_ctrl = nullptr;
637 SafePointNode* new_cast_failure_map = nullptr;
638 Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl, &new_cast_failure_map);
639 if (bad_type_ctrl != nullptr) {
640 PreserveJVMState pjvms(this);
641 if (new_cast_failure_map != nullptr) {
642 // The current map on the success path could have been modified. Use the dedicated failure path map.
643 set_map(new_cast_failure_map);
644 }
645 set_control(bad_type_ctrl);
646 uncommon_trap(Deoptimization::Reason_class_check,
647 Deoptimization::Action_none);
648 }
649 if (stopped()) {
650 return; // MUST uncommon-trap?
651 }
652 set_stack(sp() - nargs, casted_receiver);
653 }
654
655 // Note: It's OK to try to inline a virtual call.
656 // The call generator will not attempt to inline a polymorphic call
657 // unless it knows how to optimize the receiver dispatch.
658 bool try_inline = (C->do_inlining() || InlineAccessors);
659
660 // ---------------------
661 dec_sp(nargs); // Temporarily pop args for JVM state of call
662 JVMState* jvms = sync_jvms();
663
664 // ---------------------
665 // Decide call tactic.
666 // This call checks with CHA, the interpreter profile, intrinsics table, etc.
667 // It decides whether inlining is desirable or not.
668 CallGenerator* cg = C->call_generator(callee, vtable_index, call_does_dispatch, jvms, try_inline, prof_factor(), speculative_receiver_type);
669 if (failing()) {
670 return;
671 }
672 assert(cg != nullptr, "must find a CallGenerator for callee %s", callee->name()->as_utf8());
673
674 // NOTE: Don't use orig_callee and callee after this point! Use cg->method() instead.
675 orig_callee = callee = nullptr;
676
677 // ---------------------
678
679 // Feed profiling data for arguments to the type system so it can
680 // propagate it as speculative types
681 record_profiled_arguments_for_speculation(cg->method(), bc());
682
683 #ifndef PRODUCT
684 // bump global counters for calls
685 count_compiled_calls(/*at_method_entry*/ false, cg->is_inline());
686
687 // Record first part of parsing work for this call
688 parse_histogram()->record_change();
689 #endif // not PRODUCT
690
691 assert(jvms == this->jvms(), "still operating on the right JVMS");
692 assert(jvms_in_sync(), "jvms must carry full info into CG");
739
740 assert(check_call_consistency(jvms, cg), "inconsistent info");
741
742 if (!stopped()) {
743 // This was some sort of virtual call, which did a null check for us.
744 // Now we can assert receiver-not-null, on the normal return path.
745 if (receiver != nullptr && cg->is_virtual()) {
746 Node* cast = cast_not_null(receiver);
747 // %%% assert(receiver == cast, "should already have cast the receiver");
748 }
749
750 ciType* rtype = cg->method()->return_type();
751 ciType* ctype = declared_signature->return_type();
752
753 if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
754 // Be careful here with return types.
755 if (ctype != rtype) {
756 BasicType rt = rtype->basic_type();
757 BasicType ct = ctype->basic_type();
758 if (ct == T_VOID) {
759 // It's OK for a method to return a value that is discarded.
760 // The discarding does not require any special action from the caller.
761 // The Java code knows this, at VerifyType.isNullConversion.
762 pop_node(rt); // whatever it was, pop it
763 } else if (rt == T_INT || is_subword_type(rt)) {
764 // Nothing. These cases are handled in lambda form bytecode.
765 assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
766 } else if (is_reference_type(rt)) {
767 assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
768 if (ctype->is_loaded()) {
769 const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
770 const Type* sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
771 if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
772 Node* retnode = pop();
773 Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
774 push(cast_obj);
775 }
776 }
777 } else {
778 assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
779 // push a zero; it's better than getting an oop/int mismatch
797 // If the return type of the method is not loaded, assert that the
798 // value we got is a null. Otherwise, we need to recompile.
799 if (!rtype->is_loaded()) {
800 if (PrintOpto && (Verbose || WizardMode)) {
801 method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
802 cg->method()->print_name(); tty->cr();
803 }
804 if (C->log() != nullptr) {
805 C->log()->elem("assert_null reason='return' klass='%d'",
806 C->log()->identify(rtype));
807 }
808 // If there is going to be a trap, put it at the next bytecode:
809 set_bci(iter().next_bci());
810 null_assert(peek());
811 set_bci(iter().cur_bci()); // put it back
812 }
813 BasicType ct = ctype->basic_type();
814 if (is_reference_type(ct)) {
815 record_profiled_return_for_speculation();
816 }
817
818 if (!rtype->is_void()) {
819 Node* retnode = peek();
820 const Type* rettype = gvn().type(retnode);
821 if (!cg->method()->return_value_is_larval() && !retnode->is_InlineType() && rettype->is_inlinetypeptr()) {
822 retnode = InlineTypeNode::make_from_oop(this, retnode, rettype->inline_klass());
823 dec_sp(1);
824 push(retnode);
825 }
826 }
827
828 if (cg->method()->receiver_maybe_larval() && receiver != nullptr &&
829 !receiver->is_InlineType() && gvn().type(receiver)->is_inlinetypeptr()) {
830 InlineTypeNode* non_larval = InlineTypeNode::make_from_oop(this, receiver, gvn().type(receiver)->inline_klass());
831 // Relinquish the oop input, we will delay the allocation to the point it is needed, see the
832 // comments in InlineTypeNode::Ideal for more details
833 non_larval = non_larval->clone_if_required(&gvn(), nullptr);
834 non_larval->set_oop(gvn(), null());
835 non_larval->set_is_buffered(gvn(), false);
836 non_larval = gvn().transform(non_larval)->as_InlineType();
837 map()->replace_edge(receiver, non_larval);
838 }
839 }
840
841 // Restart record of parsing work after possible inlining of call
842 #ifndef PRODUCT
843 parse_histogram()->set_initial_state(bc());
844 #endif
845 }
846
847 //---------------------------catch_call_exceptions-----------------------------
848 // Put a Catch and CatchProj nodes behind a just-created call.
849 // Send their caught exceptions to the proper handler.
850 // This may be used after a call to the rethrow VM stub,
851 // when it is needed to process unloaded exception classes.
852 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
853 // Exceptions are delivered through this channel:
854 Node* i_o = this->i_o();
855
856 // Add a CatchNode.
857 Arena tmp_mem{mtCompiler};
858 GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);
|