20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "ci/ciCallSite.hpp"
26 #include "ci/ciMethodHandle.hpp"
27 #include "ci/ciSymbols.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "compiler/compileBroker.hpp"
30 #include "compiler/compileLog.hpp"
31 #include "interpreter/linkResolver.hpp"
32 #include "logging/log.hpp"
33 #include "logging/logLevel.hpp"
34 #include "logging/logMessage.hpp"
35 #include "logging/logStream.hpp"
36 #include "opto/addnode.hpp"
37 #include "opto/callGenerator.hpp"
38 #include "opto/castnode.hpp"
39 #include "opto/cfgnode.hpp"
40 #include "opto/mulnode.hpp"
41 #include "opto/parse.hpp"
42 #include "opto/rootnode.hpp"
43 #include "opto/runtime.hpp"
44 #include "opto/subnode.hpp"
45 #include "prims/methodHandles.hpp"
46 #include "runtime/sharedRuntime.hpp"
47 #include "utilities/macros.hpp"
48 #if INCLUDE_JFR
49 #include "jfr/jfr.hpp"
50 #endif
51
52 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
53 bool with_deco) {
54 if (with_deco) {
55 CompileTask::print_inline_indent(depth, out);
56 }
57 out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
58 prof_klass->name()->print_symbol_on(out);
59 if (with_deco) {
584 // Speculative type of the receiver if any
585 ciKlass* speculative_receiver_type = nullptr;
586 if (is_virtual_or_interface) {
587 Node* receiver_node = stack(sp() - nargs);
588 const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
589 // call_does_dispatch and vtable_index are out-parameters. They might be changed.
590 // For arrays, klass below is Object. When vtable calls are used,
591 // resolving the call with Object would allow an illegal call to
592 // finalize() on an array. We use holder instead: illegal calls to
593 // finalize() won't be compiled as vtable calls (IC call
594 // resolution will catch the illegal call) and the few legal calls
595 // on array types won't be either.
596 callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
597 receiver_type, is_virtual,
598 call_does_dispatch, vtable_index); // out-parameters
599 speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
600 }
601
602 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
603 ciKlass* receiver_constraint = nullptr;
604 if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_initializer()) {
605 ciInstanceKlass* calling_klass = method()->holder();
606 ciInstanceKlass* sender_klass = calling_klass;
607 if (sender_klass->is_interface()) {
608 receiver_constraint = sender_klass;
609 }
610 } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
611 assert(holder->is_interface(), "How did we get a non-interface method here?");
612 receiver_constraint = holder;
613 }
614
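// Illustrative (hypothetical) Java shape for the invokespecial case above:
//   interface I { default void m() { /* ... */ } }
//   interface J extends I { default void m() { I.super.m(); } }
// The I.super.m() call in J is an invokespecial whose receiver must be a subtype of
// the current interface J, and invoking a private interface method via invokeinterface
// requires the receiver to implement the declaring interface. Since the verifier does
// not fully check assignability to interface types, the constraint recorded above is
// re-checked dynamically below, mirroring the subtype checks performed when the call
// is resolved at runtime.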
615 if (receiver_constraint != nullptr) {
616 Node* receiver_node = stack(sp() - nargs);
617 Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
618 Node* bad_type_ctrl = nullptr;
619 Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
620 if (bad_type_ctrl != nullptr) {
621 PreserveJVMState pjvms(this);
622 set_control(bad_type_ctrl);
623 uncommon_trap(Deoptimization::Reason_class_check,
624 Deoptimization::Action_none);
714
715 assert(check_call_consistency(jvms, cg), "inconsistent info");
716
717 if (!stopped()) {
718 // This was some sort of virtual call, which did a null check for us.
719 // Now we can assert receiver-not-null, on the normal return path.
720 if (receiver != nullptr && cg->is_virtual()) {
721 Node* cast = cast_not_null(receiver);
722 // %%% assert(receiver == cast, "should already have cast the receiver");
723 }
724
725 ciType* rtype = cg->method()->return_type();
726 ciType* ctype = declared_signature->return_type();
727
728 if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
729 // Be careful here with return types.
730 if (ctype != rtype) {
731 BasicType rt = rtype->basic_type();
732 BasicType ct = ctype->basic_type();
733 if (ct == T_VOID) {
734 // It's OK for a method to return a value that is discarded.
735 // The discarding does not require any special action from the caller.
736 // The Java code knows this, at VerifyType.isNullConversion.
737 pop_node(rt); // whatever it was, pop it
738 } else if (rt == T_INT || is_subword_type(rt)) {
739 // Nothing. These cases are handled in lambda form bytecode.
740 assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
741 } else if (is_reference_type(rt)) {
742 assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
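// For calls with an optional appendix and for signature-polymorphic calls, the callee
// (e.g. a MethodHandle linker or adapter) may be declared to return a more general
// reference type, such as Object, than the return type in the call site's declared
// signature. In that case a CheckCastPP is added below to narrow the returned value
// to the signature type, so downstream code sees the type the bytecode expects.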
743 if (ctype->is_loaded()) {
744 const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
745 const Type* sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
746 if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
747 Node* retnode = pop();
748 Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
749 push(cast_obj);
750 }
751 }
752 } else {
753 assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
754 // push a zero; it's better than getting an oop/int mismatch
772 // If the return type of the method is not loaded, assert that the
773 // value we got is a null. Otherwise, we need to recompile.
774 if (!rtype->is_loaded()) {
775 if (PrintOpto && (Verbose || WizardMode)) {
776 method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
777 cg->method()->print_name(); tty->cr();
778 }
779 if (C->log() != nullptr) {
780 C->log()->elem("assert_null reason='return' klass='%d'",
781 C->log()->identify(rtype));
782 }
783 // If there is going to be a trap, put it at the next bytecode, so that a deoptimization does not re-execute the call:
784 set_bci(iter().next_bci());
785 null_assert(peek());
786 set_bci(iter().cur_bci()); // put it back
787 }
788 BasicType ct = ctype->basic_type();
789 if (is_reference_type(ct)) {
790 record_profiled_return_for_speculation();
791 }
792 }
793
794 // Restart record of parsing work after possible inlining of call
795 #ifndef PRODUCT
796 parse_histogram()->set_initial_state(bc());
797 #endif
798 }
799
800 //---------------------------catch_call_exceptions-----------------------------
801 // Put Catch and CatchProj nodes behind a just-created call.
802 // Send their caught exceptions to the proper handler.
803 // This may be used after a call to the rethrow VM stub,
804 // when unloaded exception classes need to be processed.
805 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
806 // Exceptions are delivered through this channel:
807 Node* i_o = this->i_o();
808
809 // Add a CatchNode.
810 Arena tmp_mem{mtCompiler};
811 GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "ci/ciCallSite.hpp"
26 #include "ci/ciMethodHandle.hpp"
27 #include "ci/ciSymbols.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "compiler/compileBroker.hpp"
30 #include "compiler/compileLog.hpp"
31 #include "interpreter/linkResolver.hpp"
32 #include "logging/log.hpp"
33 #include "logging/logLevel.hpp"
34 #include "logging/logMessage.hpp"
35 #include "logging/logStream.hpp"
36 #include "opto/addnode.hpp"
37 #include "opto/callGenerator.hpp"
38 #include "opto/castnode.hpp"
39 #include "opto/cfgnode.hpp"
40 #include "opto/inlinetypenode.hpp"
41 #include "opto/mulnode.hpp"
42 #include "opto/parse.hpp"
43 #include "opto/rootnode.hpp"
44 #include "opto/runtime.hpp"
45 #include "opto/subnode.hpp"
46 #include "prims/methodHandles.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "utilities/macros.hpp"
49 #if INCLUDE_JFR
50 #include "jfr/jfr.hpp"
51 #endif
52
53 static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count,
54 bool with_deco) {
55 if (with_deco) {
56 CompileTask::print_inline_indent(depth, out);
57 }
58 out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
59 prof_klass->name()->print_symbol_on(out);
60 if (with_deco) {
585 // Speculative type of the receiver if any
586 ciKlass* speculative_receiver_type = nullptr;
587 if (is_virtual_or_interface) {
588 Node* receiver_node = stack(sp() - nargs);
589 const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
590 // call_does_dispatch and vtable_index are out-parameters. They might be changed.
591 // For arrays, klass below is Object. When vtable calls are used,
592 // resolving the call with Object would allow an illegal call to
593 // finalize() on an array. We use holder instead: illegal calls to
594 // finalize() won't be compiled as vtable calls (IC call
595 // resolution will catch the illegal call) and the few legal calls
596 // on array types won't be either.
597 callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
598 receiver_type, is_virtual,
599 call_does_dispatch, vtable_index); // out-parameters
600 speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
601 }
602
603 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
604 ciKlass* receiver_constraint = nullptr;
605 if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_constructor()) {
606 ciInstanceKlass* calling_klass = method()->holder();
607 ciInstanceKlass* sender_klass = calling_klass;
608 if (sender_klass->is_interface()) {
609 receiver_constraint = sender_klass;
610 }
611 } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
612 assert(holder->is_interface(), "How did we get a non-interface method here?");
613 receiver_constraint = holder;
614 }
615
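// Illustrative (hypothetical) Java shape for the invokespecial case above:
//   interface I { default void m() { /* ... */ } }
//   interface J extends I { default void m() { I.super.m(); } }
// The I.super.m() call in J is an invokespecial whose receiver must be a subtype of
// the current interface J, and invoking a private interface method via invokeinterface
// requires the receiver to implement the declaring interface. Since the verifier does
// not fully check assignability to interface types, the constraint recorded above is
// re-checked dynamically below, mirroring the subtype checks performed when the call
// is resolved at runtime.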
616 if (receiver_constraint != nullptr) {
617 Node* receiver_node = stack(sp() - nargs);
618 Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
619 Node* bad_type_ctrl = nullptr;
620 Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
621 if (bad_type_ctrl != nullptr) {
622 PreserveJVMState pjvms(this);
623 set_control(bad_type_ctrl);
624 uncommon_trap(Deoptimization::Reason_class_check,
625 Deoptimization::Action_none);
715
716 assert(check_call_consistency(jvms, cg), "inconsistent info");
717
718 if (!stopped()) {
719 // This was some sort of virtual call, which did a null check for us.
720 // Now we can assert receiver-not-null, on the normal return path.
721 if (receiver != nullptr && cg->is_virtual()) {
722 Node* cast = cast_not_null(receiver);
723 // %%% assert(receiver == cast, "should already have cast the receiver");
724 }
725
726 ciType* rtype = cg->method()->return_type();
727 ciType* ctype = declared_signature->return_type();
728
729 if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
730 // Be careful here with return types.
731 if (ctype != rtype) {
732 BasicType rt = rtype->basic_type();
733 BasicType ct = ctype->basic_type();
734 if (ct == T_VOID) {
735 // It's OK for a method to return a value that is discarded.
736 // The discarding does not require any special action from the caller.
737 // The Java code knows this, at VerifyType.isNullConversion.
738 pop_node(rt); // whatever it was, pop it
739 } else if (rt == T_INT || is_subword_type(rt)) {
740 // Nothing. These cases are handled in lambda form bytecode.
741 assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
742 } else if (is_reference_type(rt)) {
743 assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
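// For calls with an optional appendix and for signature-polymorphic calls, the callee
// (e.g. a MethodHandle linker or adapter) may be declared to return a more general
// reference type, such as Object, than the return type in the call site's declared
// signature. In that case a CheckCastPP is added below to narrow the returned value
// to the signature type, so downstream code sees the type the bytecode expects.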
744 if (ctype->is_loaded()) {
745 const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
746 const Type* sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
747 if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
748 Node* retnode = pop();
749 Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
750 push(cast_obj);
751 }
752 }
753 } else {
754 assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
755 // push a zero; it's better than getting an oop/int mismatch
773 // If the return type of the method is not loaded, assert that the
774 // value we got is a null. Otherwise, we need to recompile.
775 if (!rtype->is_loaded()) {
776 if (PrintOpto && (Verbose || WizardMode)) {
777 method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
778 cg->method()->print_name(); tty->cr();
779 }
780 if (C->log() != nullptr) {
781 C->log()->elem("assert_null reason='return' klass='%d'",
782 C->log()->identify(rtype));
783 }
784 // If there is going to be a trap, put it at the next bytecode, so that a deoptimization does not re-execute the call:
785 set_bci(iter().next_bci());
786 null_assert(peek());
787 set_bci(iter().cur_bci()); // put it back
788 }
789 BasicType ct = ctype->basic_type();
790 if (is_reference_type(ct)) {
791 record_profiled_return_for_speculation();
792 }
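// If the callee returns a value object as a plain oop, wrap it in an InlineTypeNode
// (reconstructed from the returned oop) so that it can be used in scalarized form;
// the null-free property is taken from whether the returned oop is known non-null.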
793 if (rtype->is_inlinetype() && !peek()->is_InlineType()) {
794 Node* retnode = pop();
795 retnode = InlineTypeNode::make_from_oop(this, retnode, rtype->as_inline_klass(), !gvn().type(retnode)->maybe_null());
796 push_node(T_OBJECT, retnode);
797 }
798
799 // Note that:
800 // - The caller map is the state just before the call to the currently parsed method, with all arguments
801 // on the stack. Therefore, we have caller_map->arg(0) == this.
802 // - local(0) contains the updated receiver after calling an inline type constructor.
803 // - Abstract value classes are not ciInlineKlass instances and thus abstract_value_klass->is_inlinetype() is false.
804 // We use the bottom type of the receiver node to determine if we have a value class or not.
805 const bool is_current_method_inline_type_constructor =
807 // Is the current method a constructor (i.e., <init>)?
807 _method->is_object_constructor() &&
808 // Is the holder of the current constructor method an inline type?
809 _caller->map()->argument(_caller, 0)->bottom_type()->is_inlinetypeptr();
810 assert(!is_current_method_inline_type_constructor || !cg->method()->is_object_constructor() || receiver != nullptr,
811 "must have valid receiver after calling another constructor");
812 if (is_current_method_inline_type_constructor &&
813 // Is the just called method an inline type constructor?
814 cg->method()->is_object_constructor() && receiver->bottom_type()->is_inlinetypeptr() &&
815 // AND:
816 // 1) ... invoked on the same receiver? Then it's another constructor on the same object doing the initialization.
817 (receiver == _caller->map()->argument(_caller, 0) ||
818 // 2) ... abstract? Then it's the call to the super constructor which eventually calls Object.<init> to
819 // finish the initialization of this larval.
820 cg->method()->holder()->is_abstract() ||
821 // 3) ... Object.<init>? Then we know it's the final call to finish the larval initialization. Other
822 // Object.<init> calls would have a non-inline-type receiver which we already excluded in the check above.
823 cg->method()->holder()->is_java_lang_Object())
824 ) {
825 assert(local(0)->is_InlineType() && receiver->bottom_type()->is_inlinetypeptr() && receiver->is_InlineType() &&
826 _caller->map()->argument(_caller, 0)->bottom_type()->inline_klass() == receiver->bottom_type()->inline_klass(),
827 "Unexpected receiver");
828 InlineTypeNode* updated_receiver = local(0)->as_InlineType();
829 InlineTypeNode* cloned_updated_receiver = updated_receiver->clone_if_required(&_gvn, _map);
830 cloned_updated_receiver->set_is_larval(false);
831 cloned_updated_receiver = _gvn.transform(cloned_updated_receiver)->as_InlineType();
832 // Receiver updated by the just called constructor. We need to update the map to make the effect visible. After
833 // the super() call, only the updated receiver in local(0) will be used from now on. Therefore, we do not need
834 // to update the original receiver 'receiver' but only the 'updated_receiver'.
835 replace_in_map(updated_receiver, cloned_updated_receiver);
836
837 if (_caller->has_method()) {
838 // If the current method is inlined, we also need to update the exit map to propagate the updated receiver
839 // to the caller map.
840 Node* receiver_in_caller = _caller->map()->argument(_caller, 0);
841 assert(receiver_in_caller->bottom_type()->inline_klass() == receiver->bottom_type()->inline_klass(),
842 "Receiver type mismatch");
843 _exits.map()->replace_edge(receiver_in_caller, cloned_updated_receiver, &_gvn);
844 }
845 }
846 }
847
848 // Restart record of parsing work after possible inlining of call
849 #ifndef PRODUCT
850 parse_histogram()->set_initial_state(bc());
851 #endif
852 }
853
854 //---------------------------catch_call_exceptions-----------------------------
855 // Put Catch and CatchProj nodes behind a just-created call.
856 // Send their caught exceptions to the proper handler.
857 // This may be used after a call to the rethrow VM stub,
858 // when unloaded exception classes need to be processed.
859 void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
860 // Exceptions are delivered through this channel:
861 Node* i_o = this->i_o();
862
863 // Add a CatchNode.
864 Arena tmp_mem{mtCompiler};
865 GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);