 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciCallSite.hpp"
#include "ci/ciMethodHandle.hpp"
#include "ci/ciSymbols.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "logging/log.hpp"
#include "logging/logLevel.hpp"
#include "logging/logMessage.hpp"
#include "logging/logStream.hpp"
#include "opto/addnode.hpp"
#include "opto/callGenerator.hpp"
#include "opto/castnode.hpp"
#include "opto/cfgnode.hpp"
#include "opto/inlinetypenode.hpp"
#include "opto/mulnode.hpp"
#include "opto/parse.hpp"
#include "opto/rootnode.hpp"
#include "opto/runtime.hpp"
#include "opto/subnode.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_JFR
#include "jfr/jfr.hpp"
#endif

static void print_trace_type_profile(outputStream* out, int depth, ciKlass* prof_klass, int site_count, int receiver_count) {
  CompileTask::print_inline_indent(depth, out);
  out->print(" \\-> TypeProfile (%d/%d counts) = ", receiver_count, site_count);
  prof_klass->name()->print_symbol_on(out);
  out->cr();
}

static void trace_type_profile(Compile* C, ciMethod* method, int depth, int bci, ciMethod* prof_method,
// ... [remainder of this declaration and intervening code elided in this excerpt] ...
  // Speculative type of the receiver if any
  ciKlass* speculative_receiver_type = nullptr;
  if (is_virtual_or_interface) {
    Node* receiver_node = stack(sp() - nargs);
    const TypeOopPtr* receiver_type = _gvn.type(receiver_node)->isa_oopptr();
    // call_does_dispatch and vtable_index are out-parameters. They might be changed.
    // For arrays, klass below is Object. When vtable calls are used,
    // resolving the call with Object would allow an illegal call to
    // finalize() on an array. We use holder instead: illegal calls to
    // finalize() won't be compiled as vtable calls (IC call
    // resolution will catch the illegal call) and the few legal calls
    // on array types won't be either.
    callee = C->optimize_virtual_call(method(), klass, holder, orig_callee,
                                      receiver_type, is_virtual,
                                      call_does_dispatch, vtable_index);  // out-parameters
    speculative_receiver_type = receiver_type != nullptr ? receiver_type->speculative_type() : nullptr;
  }
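
  // The speculative receiver type, when present, is profile-derived; later on it can be used
  // when a call generator is chosen, e.g. to attempt guarded devirtualization or inlining
  // against the speculated receiver class.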

  // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
  ciKlass* receiver_constraint = nullptr;
  if (iter().cur_bc_raw() == Bytecodes::_invokespecial && !orig_callee->is_object_constructor()) {
    ciInstanceKlass* calling_klass = method()->holder();
    ciInstanceKlass* sender_klass = calling_klass;
    if (sender_klass->is_interface()) {
      receiver_constraint = sender_klass;
    }
  } else if (iter().cur_bc_raw() == Bytecodes::_invokeinterface && orig_callee->is_private()) {
    assert(holder->is_interface(), "How did we get a non-interface method here!");
    receiver_constraint = holder;
  }
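
  // Background (illustrative, not an exact rule statement): these constraints cover calls the
  // verifier cannot fully check statically, e.g. a default method of interface I invoking
  // J.super.m() via invokespecial, or a private interface method invoked via invokeinterface.
  // In both cases the receiver must be a subtype of the constraining interface, so a dynamic
  // subtype check is emitted below and a failing receiver triggers an uncommon trap.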
  if (receiver_constraint != nullptr) {
    Node* receiver_node = stack(sp() - nargs);
    Node* cls_node = makecon(TypeKlassPtr::make(receiver_constraint, Type::trust_interfaces));
    Node* bad_type_ctrl = nullptr;
    Node* casted_receiver = gen_checkcast(receiver_node, cls_node, &bad_type_ctrl);
    if (bad_type_ctrl != nullptr) {
      PreserveJVMState pjvms(this);
      set_control(bad_type_ctrl);
      uncommon_trap(Deoptimization::Reason_class_check,
                    Deoptimization::Action_none);
      // ... [intervening code elided in this excerpt] ...

  assert(check_call_consistency(jvms, cg), "inconsistent info");

  if (!stopped()) {
    // This was some sort of virtual call, which did a null check for us.
    // Now we can assert receiver-not-null, on the normal return path.
    if (receiver != nullptr && cg->is_virtual()) {
      Node* cast = cast_not_null(receiver);
      // %%% assert(receiver == cast, "should already have cast the receiver");
    }

    ciType* rtype = cg->method()->return_type();
    ciType* ctype = declared_signature->return_type();

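    // For call sites with an optional appendix (invokedynamic/invokehandle) and for
    // signature-polymorphic methods, the symbolic signature at the call site and the signature
    // of the resolved adapter may legitimately differ, so the value just pushed may need to be
    // popped, left as-is for int/subword cases, or cast to the declared reference type.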
    if (Bytecodes::has_optional_appendix(iter().cur_bc_raw()) || is_signature_polymorphic) {
      // Be careful here with return types.
      if (ctype != rtype) {
        BasicType rt = rtype->basic_type();
        BasicType ct = ctype->basic_type();
        if (ct == T_VOID) {
          // It's OK for a method to return a value that is discarded.
          // The discarding does not require any special action from the caller.
          // The Java code knows this, at VerifyType.isNullConversion.
          pop_node(rt);  // whatever it was, pop it
        } else if (rt == T_INT || is_subword_type(rt)) {
          // Nothing. These cases are handled in lambda form bytecode.
          assert(ct == T_INT || is_subword_type(ct), "must match: rt=%s, ct=%s", type2name(rt), type2name(ct));
        } else if (is_reference_type(rt)) {
          assert(is_reference_type(ct), "rt=%s, ct=%s", type2name(rt), type2name(ct));
          if (ctype->is_loaded()) {
            const TypeOopPtr* arg_type = TypeOopPtr::make_from_klass(rtype->as_klass());
            const Type* sig_type = TypeOopPtr::make_from_klass(ctype->as_klass());
            if (arg_type != nullptr && !arg_type->higher_equal(sig_type)) {
              Node* retnode = pop();
              Node* cast_obj = _gvn.transform(new CheckCastPPNode(control(), retnode, sig_type));
              push(cast_obj);
            }
          }
        } else {
          assert(rt == ct, "unexpected mismatch: rt=%s, ct=%s", type2name(rt), type2name(ct));
          // push a zero; it's better than getting an oop/int mismatch
          // ... [intervening code elided in this excerpt] ...
    // If the return type of the method is not loaded, assert that the
    // value we got is a null. Otherwise, we need to recompile.
    if (!rtype->is_loaded()) {
      if (PrintOpto && (Verbose || WizardMode)) {
        method()->print_name(); tty->print_cr(" asserting nullness of result at bci: %d", bci());
        cg->method()->print_name(); tty->cr();
      }
      if (C->log() != nullptr) {
        C->log()->elem("assert_null reason='return' klass='%d'",
                       C->log()->identify(rtype));
      }
      // If there is going to be a trap, put it at the next bytecode:
      set_bci(iter().next_bci());
      null_assert(peek());
      set_bci(iter().cur_bci()); // put it back
    }
    BasicType ct = ctype->basic_type();
    if (is_reference_type(ct)) {
      record_profiled_return_for_speculation();
    }
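    // If the callee returns an inline type but the value on top of the stack is still a plain
    // oop (e.g. because the call was not inlined), wrap it in an InlineTypeNode so subsequent
    // parsing sees the scalarized form; the last argument reflects whether the oop is known
    // to be non-null.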
    if (rtype->is_inlinetype() && !peek()->is_InlineType()) {
      Node* retnode = pop();
      retnode = InlineTypeNode::make_from_oop(this, retnode, rtype->as_inline_klass(), !gvn().type(retnode)->maybe_null());
      push_node(T_OBJECT, retnode);
    }

    // Note that:
    // - The caller map is the state just before the call of the currently parsed method with all arguments
    //   on the stack. Therefore, we have caller_map->arg(0) == this.
    // - local(0) contains the updated receiver after calling an inline type constructor.
    // - Abstract value classes are not ciInlineKlass instances and thus abstract_value_klass->is_inlinetype() is false.
    //   We use the bottom type of the receiver node to determine if we have a value class or not.
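    //
    // Illustrative sketch (hedged; Valhalla 'value class' syntax assumed, not code from this file):
    //
    //   value class Point {
    //     int x;
    //     Point(int x) { this.x = x; }   // the receiver stays larval until construction completes
    //   }
    //
    // When such a constructor delegates to another constructor, to an abstract super constructor,
    // or finally to Object.<init>, the updated receiver ends up in local(0); the logic below
    // publishes that updated value into the current map and, if this constructor is being inlined,
    // into the caller's exit map.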
    const bool is_current_method_inline_type_constructor =
        // Is the current method a constructor (i.e. <init>)?
        _method->is_object_constructor() &&
        // Is the holder of the current constructor method an inline type?
        _caller->map()->argument(_caller, 0)->bottom_type()->is_inlinetypeptr();
    assert(!is_current_method_inline_type_constructor || !cg->method()->is_object_constructor() || receiver != nullptr,
           "must have valid receiver after calling another constructor");
    if (is_current_method_inline_type_constructor &&
        // Is the just called method an inline type constructor?
        cg->method()->is_object_constructor() && receiver->bottom_type()->is_inlinetypeptr() &&
        // AND:
        // 1) ... invoked on the same receiver? Then it's another constructor on the same object doing the initialization.
        (receiver == _caller->map()->argument(_caller, 0) ||
         // 2) ... abstract? Then it's the call to the super constructor which eventually calls Object.<init> to
         //    finish the initialization of this larval.
         cg->method()->holder()->is_abstract() ||
         // 3) ... Object.<init>? Then we know it's the final call to finish the larval initialization. Other
         //    Object.<init> calls would have a non-inline-type receiver which we already excluded in the check above.
         cg->method()->holder()->is_java_lang_Object())
        ) {
      assert(local(0)->is_InlineType() && receiver->bottom_type()->is_inlinetypeptr() && receiver->is_InlineType() &&
             _caller->map()->argument(_caller, 0)->bottom_type()->inline_klass() == receiver->bottom_type()->inline_klass(),
             "Unexpected receiver");
      InlineTypeNode* updated_receiver = local(0)->as_InlineType();
      InlineTypeNode* cloned_updated_receiver = updated_receiver->clone_if_required(&_gvn, _map);
      cloned_updated_receiver->set_is_larval(false);
      cloned_updated_receiver = _gvn.transform(cloned_updated_receiver)->as_InlineType();
      // Receiver updated by the just called constructor. We need to update the map to make the effect visible. After
      // the super() call, only the updated receiver in local(0) will be used from now on. Therefore, we do not need
      // to update the original receiver 'receiver' but only the 'updated_receiver'.
      replace_in_map(updated_receiver, cloned_updated_receiver);

      if (_caller->has_method()) {
        // If the current method is inlined, we also need to update the exit map to propagate the updated receiver
        // to the caller map.
        Node* receiver_in_caller = _caller->map()->argument(_caller, 0);
        assert(receiver_in_caller->bottom_type()->inline_klass() == receiver->bottom_type()->inline_klass(),
               "Receiver type mismatch");
        _exits.map()->replace_edge(receiver_in_caller, cloned_updated_receiver, &_gvn);
      }
    }
  }

  // Restart record of parsing work after possible inlining of call
#ifndef PRODUCT
  parse_histogram()->set_initial_state(bc());
#endif
}

//---------------------------catch_call_exceptions-----------------------------
// Put Catch and CatchProj nodes behind a just-created call.
// Send their caught exceptions to the proper handler.
// This may be used after a call to the rethrow VM stub,
// when it is needed to process unloaded exception classes.
void Parse::catch_call_exceptions(ciExceptionHandlerStream& handlers) {
  // Exceptions are delivered through this channel:
  Node* i_o = this->i_o();

  // Add a CatchNode.
  Arena tmp_mem{mtCompiler};
  GrowableArray<int> bcis(&tmp_mem, 8, 0, -1);