649 break;
650
// Slow-path allocation of a new object instance. All three stub ids
// share the same runtime call; they differ only in the debug name
// recorded for the stub.
case C1StubId::new_instance_id:
case C1StubId::fast_new_instance_id:
case C1StubId::fast_new_instance_init_check_id:
  {
    Register klass = r3; // Incoming: klass of the object to allocate
    Register obj = r0;   // Result: the newly allocated oop

    if (id == C1StubId::new_instance_id) {
      __ set_info("new_instance", dont_gc_arguments);
    } else if (id == C1StubId::fast_new_instance_id) {
      __ set_info("fast new_instance", dont_gc_arguments);
    } else {
      assert(id == C1StubId::fast_new_instance_init_check_id, "bad C1StubId");
      __ set_info("fast new_instance init check", dont_gc_arguments);
    }

    __ enter();
    OopMap* map = save_live_registers(sasm);
    // Call into the VM; the returned oop is placed in obj (r0).
    int call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result, so it is excluded from the register restore.
    restore_live_registers_except_r0(sasm);
    __ verify_oop(obj);
    __ leave();
    __ ret(lr);

    // r0: new instance
  }

  break;
681
// Invocation/backedge counter overflow: notify the VM (which may trigger
// recompilation). No result is returned.
case C1StubId::counter_overflow_id:
  {
    Register bci = r0, method = r1;
    __ enter();
    OopMap* map = save_live_registers(sasm);
    // Retrieve bci from the caller's frame (slot at rfp + 2 words)
    __ ldrw(bci, Address(rfp, 2*BytesPerWord));
    // And a pointer to the Method* (slot at rfp + 3 words)
    __ ldr(method, Address(rfp, 3*BytesPerWord));
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    restore_live_registers(sasm);
    __ leave();
    __ ret(lr);
  }
  break;
699
// Slow-path allocation of a new type-array (primitive elements) or
// object-array.
case C1StubId::new_type_array_id:
case C1StubId::new_object_array_id:
  {
    Register length = r19; // Incoming: array length
    Register klass = r3;   // Incoming: array klass
    Register obj = r0;     // Result: the newly allocated array oop

    if (id == C1StubId::new_type_array_id) {
      __ set_info("new_type_array", dont_gc_arguments);
    } else {
      __ set_info("new_object_array", dont_gc_arguments);
    }

#ifdef ASSERT
    // assert object type is really an array of the proper kind:
    // extract the array tag from the klass' layout helper and compare
    // it against the tag expected for this stub id.
    {
      Label ok;
      Register t0 = obj; // obj (r0) is free until the runtime call below
      __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
      __ asrw(t0, t0, Klass::_lh_array_tag_shift);
      int tag = ((id == C1StubId::new_type_array_id)
                 ? Klass::_lh_array_tag_type_value
                 : Klass::_lh_array_tag_obj_value);
      __ mov(rscratch1, tag);
      __ cmpw(t0, rscratch1);
      __ br(Assembler::EQ, ok);
      __ stop("assert(is an array klass)");
      __ should_not_reach_here();
      __ bind(ok);
    }
#endif // ASSERT

    __ enter();
    OopMap* map = save_live_registers(sasm);
    int call_offset;
    if (id == C1StubId::new_type_array_id) {
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
    } else {
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
    }

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result, so it is excluded from the register restore.
    restore_live_registers_except_r0(sasm);

    __ verify_oop(obj);
    __ leave();
    __ ret(lr);

    // r0: new array
  }
  break;
752
// Slow-path allocation of a multi-dimensional array.
case C1StubId::new_multi_array_id:
  { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
    // r0: klass
    // r19: rank (number of dimensions)
    // r2: address of 1st dimension
    OopMap* map = save_live_registers(sasm);
    // Shuffle incoming registers into C argument registers. Order matters:
    // r2 must be copied into c_rarg3 before c_rarg2 (which is r2 on
    // AArch64) is overwritten with the rank.
    __ mov(c_rarg1, r0);
    __ mov(c_rarg3, r2);
    __ mov(c_rarg2, r19);
    int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result, so it is excluded from the register restore.
    restore_live_registers_except_r0(sasm);

    // r0: new multi array
    __ verify_oop(r0);
  }
  break;
772
// Register an object (in r0 / c_rarg0) for finalization. Fast path
// returns immediately when the klass has no finalizer.
case C1StubId::register_finalizer_id:
  {
    __ set_info("register_finalizer", dont_gc_arguments);

    // This is called via call_runtime so the arguments
    // will be placed in C abi locations

    __ verify_oop(c_rarg0);

    // load the klass and check the has finalizer flag; if it is clear
    // there is nothing to register and we return straight away.
    Label register_finalizer;
    Register t = r5;
    __ load_klass(t, r0);
    __ ldrb(t, Address(t, Klass::misc_flags_offset()));
    __ tbnz(t, exact_log2(KlassFlags::_misc_has_finalizer), register_finalizer);
    __ ret(lr);

    // Slow path: hand the object to the VM.
    __ bind(register_finalizer);
    __ enter();
    OopMap* oop_map = save_live_registers(sasm);
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, oop_map);

    // Now restore all the live registers
    restore_live_registers(sasm);

    __ leave();
    __ ret(lr);
  }
  break;
804
// Shared exception-throw shell for ClassCastException.
// Trailing 'true' presumably means the runtime entry takes an argument
// (the failing object) — confirm against generate_exception_throw.
case C1StubId::throw_class_cast_exception_id:
  { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
  }
  break;
810
811 case C1StubId::throw_incompatible_class_change_error_id:
812 { StubFrame f(sasm, "throw_incompatible_class_cast_exception", dont_gc_arguments, does_not_return);
813 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
814 }
815 break;
816
817 case C1StubId::slow_subtype_check_id:
818 {
819 // Typical calling sequence:
820 // __ push(klass_RInfo); // object klass or other subclass
821 // __ push(sup_k_RInfo); // array element klass or other superclass
822 // __ bl(slow_subtype_check);
823 // Note that the subclass is pushed first, and is therefore deepest.
824 enum layout {
825 r0_off, r0_off_hi,
826 r2_off, r2_off_hi,
827 r4_off, r4_off_hi,
828 r5_off, r5_off_hi,
829 sup_k_off, sup_k_off_hi,
830 klass_off, klass_off_hi,
831 framesize,
832 result_off = sup_k_off
833 };
834
835 __ set_info("slow_subtype_check", dont_gc_arguments);
836 __ push(RegSet::of(r0, r2, r4, r5), sp);
1006 __ leave();
1007 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1008 assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1009
1010 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1011 }
1012 break;
1013
// Notify dtrace of an object allocation.
case C1StubId::dtrace_object_alloc_id:
  { // c_rarg0: object
    StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
    // Save/restore all registers around the probe so the compiled caller's
    // state is untouched; no oop map is recorded for this leaf call.
    save_live_registers(sasm);

    __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);

    restore_live_registers(sasm);
  }
  break;
1024
default:
  // Unhandled stub id: emit a stub that calls unimplemented_entry with
  // the offending id in r0 so the failure is reported at runtime.
  { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
    __ mov(r0, (int)id);
    __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
  }
  break;
1031 }
1032 }
1033 return oop_maps;
1034 }
1035
1036 #undef __
1037
1038 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); }
|
649 break;
650
651 case C1StubId::new_instance_id:
652 case C1StubId::fast_new_instance_id:
653 case C1StubId::fast_new_instance_init_check_id:
654 {
655 Register klass = r3; // Incoming
656 Register obj = r0; // Result
657
658 if (id == C1StubId::new_instance_id) {
659 __ set_info("new_instance", dont_gc_arguments);
660 } else if (id == C1StubId::fast_new_instance_id) {
661 __ set_info("fast new_instance", dont_gc_arguments);
662 } else {
663 assert(id == C1StubId::fast_new_instance_init_check_id, "bad C1StubId");
664 __ set_info("fast new_instance init check", dont_gc_arguments);
665 }
666
667 __ enter();
668 OopMap* map = save_live_registers(sasm);
669 int call_offset;
670 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
671 oop_maps = new OopMapSet();
672 oop_maps->add_gc_map(call_offset, map);
673 restore_live_registers_except_r0(sasm);
674 __ verify_oop(obj);
675 __ leave();
676 __ ret(lr);
677
678 // r0,: new instance
679 }
680
681 break;
682
// Invocation/backedge counter overflow: notify the VM (which may trigger
// recompilation). No result is returned.
case C1StubId::counter_overflow_id:
  {
    Register bci = r0, method = r1;
    __ enter();
    OopMap* map = save_live_registers(sasm);
    // Retrieve bci from the caller's frame (slot at rfp + 2 words)
    __ ldrw(bci, Address(rfp, 2*BytesPerWord));
    // And a pointer to the Method* (slot at rfp + 3 words)
    __ ldr(method, Address(rfp, 3*BytesPerWord));
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    restore_live_registers(sasm);
    __ leave();
    __ ret(lr);
  }
  break;
700
// Slow-path allocation of a new type-array (primitive elements),
// object-array, or null-free (inline type element) array.
case C1StubId::new_type_array_id:
case C1StubId::new_object_array_id:
case C1StubId::new_null_free_array_id:
  {
    Register length = r19; // Incoming: array length
    Register klass = r3;   // Incoming: array klass
    Register obj = r0;     // Result: the newly allocated array oop

    if (id == C1StubId::new_type_array_id) {
      __ set_info("new_type_array", dont_gc_arguments);
    } else if (id == C1StubId::new_object_array_id) {
      __ set_info("new_object_array", dont_gc_arguments);
    } else {
      __ set_info("new_null_free_array", dont_gc_arguments);
    }

#ifdef ASSERT
    // assert object type is really an array of the proper kind:
    // extract the array tag from the klass' layout helper and compare it
    // against the tag(s) acceptable for this stub id.
    {
      Label ok;
      Register t0 = obj; // obj (r0) is free until the runtime call below
      __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
      __ asrw(t0, t0, Klass::_lh_array_tag_shift);
      switch (id) {
      case C1StubId::new_type_array_id:
        __ cmpw(t0, Klass::_lh_array_tag_type_value);
        __ br(Assembler::EQ, ok);
        __ stop("assert(is a type array klass)");
        break;
      case C1StubId::new_object_array_id:
        // Either tag is acceptable here.
        __ cmpw(t0, Klass::_lh_array_tag_obj_value); // new "[Ljava/lang/Object;"
        __ br(Assembler::EQ, ok);
        __ cmpw(t0, Klass::_lh_array_tag_vt_value); // new "[LVT;"
        __ br(Assembler::EQ, ok);
        __ stop("assert(is an object or inline type array klass)");
        break;
      case C1StubId::new_null_free_array_id:
        __ cmpw(t0, Klass::_lh_array_tag_vt_value); // the array can be a flat array.
        __ br(Assembler::EQ, ok);
        __ cmpw(t0, Klass::_lh_array_tag_obj_value); // the array cannot be a flat array (due to InlineArrayElementMaxFlatSize, etc)
        __ br(Assembler::EQ, ok);
        __ stop("assert(is an object or inline type array klass)");
        break;
      default: ShouldNotReachHere();
      }
      __ should_not_reach_here();
      __ bind(ok);
    }
#endif // ASSERT

    __ enter();
    OopMap* map = save_live_registers(sasm);
    int call_offset;
    if (id == C1StubId::new_type_array_id) {
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
    } else if (id == C1StubId::new_object_array_id) {
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
    } else {
      assert(id == C1StubId::new_null_free_array_id, "must be");
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_null_free_array), klass, length);
    }

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result, so it is excluded from the register restore.
    restore_live_registers_except_r0(sasm);

    __ verify_oop(obj);
    __ leave();
    __ ret(lr);

    // r0: new array
  }
  break;
774
// Slow-path allocation of a multi-dimensional array.
case C1StubId::new_multi_array_id:
  { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
    // r0: klass
    // r19: rank (number of dimensions)
    // r2: address of 1st dimension
    OopMap* map = save_live_registers(sasm);
    // Shuffle incoming registers into C argument registers. Order matters:
    // r2 must be copied into c_rarg3 before c_rarg2 (which is r2 on
    // AArch64) is overwritten with the rank.
    __ mov(c_rarg1, r0);
    __ mov(c_rarg3, r2);
    __ mov(c_rarg2, r19);
    int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result, so it is excluded from the register restore.
    restore_live_registers_except_r0(sasm);

    // r0: new multi array
    __ verify_oop(r0);
  }
  break;
794
// Buffer the inline-type arguments of a C1 method into heap objects;
// the result (an array of the buffered values) is returned in r20.
case C1StubId::buffer_inline_args_id:
case C1StubId::buffer_inline_args_no_receiver_id:
  {
    const char* name = (id == C1StubId::buffer_inline_args_id) ?
      "buffer_inline_args" : "buffer_inline_args_no_receiver";
    StubFrame f(sasm, name, dont_gc_arguments);
    OopMap* map = save_live_registers(sasm);
    Register method = r19; // Incoming
    address entry = (id == C1StubId::buffer_inline_args_id) ?
      CAST_FROM_FN_PTR(address, buffer_inline_args) :
      CAST_FROM_FN_PTR(address, buffer_inline_args_no_receiver);
    // This is called from a C1 method's scalarized entry point
    // where r0-r7 may be holding live argument values so we can't
    // return the result in r0 as the other stubs do. LR is used as
    // a temporary below to avoid the result being clobbered by
    // restore_live_registers.
    int call_offset = __ call_RT(lr, noreg, entry, method);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    restore_live_registers(sasm);
    __ mov(r20, lr);
    __ verify_oop(r20); // r20: an array of buffered value objects
  }
  break;
819
// Load an element out of a flat array; the element is returned as a
// heap object in r0.
case C1StubId::load_flat_array_id:
  {
    StubFrame f(sasm, "load_flat_array", dont_gc_arguments);
    OopMap* map = save_live_registers(sasm);

    // Called with store_parameter and not C abi

    f.load_argument(1, r0); // r0: array
    f.load_argument(0, r1); // r1: index
    int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, load_flat_array), r0, r1);

    // Ensure the stores that initialize the buffer are visible
    // before any subsequent store that publishes this reference.
    __ membar(Assembler::StoreStore);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result, so it is excluded from the register restore.
    restore_live_registers_except_r0(sasm);

    // r0: loaded element at array[index]
    __ verify_oop(r0);
  }
  break;
843
// Store a value into a flat array element. No result is produced.
case C1StubId::store_flat_array_id:
  {
    StubFrame f(sasm, "store_flat_array", dont_gc_arguments);
    // NOTE(review): second argument (4) presumably reserves outgoing
    // runtime-argument slots — confirm against save_live_registers.
    OopMap* map = save_live_registers(sasm, 4);

    // Called with store_parameter and not C abi

    f.load_argument(2, r0); // r0: array
    f.load_argument(1, r1); // r1: index
    f.load_argument(0, r2); // r2: value
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, store_flat_array), r0, r1, r2);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // NOTE(review): the call produces no result in r0, yet r0 is excluded
    // from the restore — confirm this asymmetry is intentional.
    restore_live_registers_except_r0(sasm);
  }
  break;
861
// Substitutability check for two operands; the boolean answer is left
// in r0 by the runtime.
case C1StubId::substitutability_check_id:
  {
    StubFrame f(sasm, "substitutability_check", dont_gc_arguments);
    OopMap* map = save_live_registers(sasm);

    // Called with store_parameter and not C abi

    f.load_argument(1, r1); // r1: left operand
    f.load_argument(0, r2); // r2: right operand
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, substitutability_check), r1, r2);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 carries the answer, so it is excluded from the register restore.
    restore_live_registers_except_r0(sasm);

    // r0: are the two operands substitutable
  }
  break;
880
// Register an object (in r0 / c_rarg0) for finalization. Fast path
// returns immediately when the klass has no finalizer.
case C1StubId::register_finalizer_id:
  {
    __ set_info("register_finalizer", dont_gc_arguments);

    // This is called via call_runtime so the arguments
    // will be placed in C abi locations

    __ verify_oop(c_rarg0);

    // load the klass and check the has finalizer flag; if it is clear
    // there is nothing to register and we return straight away.
    Label register_finalizer;
    Register t = r5;
    __ load_klass(t, r0);
    __ ldrb(t, Address(t, Klass::misc_flags_offset()));
    __ tbnz(t, exact_log2(KlassFlags::_misc_has_finalizer), register_finalizer);
    __ ret(lr);

    // Slow path: hand the object to the VM.
    __ bind(register_finalizer);
    __ enter();
    OopMap* oop_map = save_live_registers(sasm);
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, oop_map);

    // Now restore all the live registers
    restore_live_registers(sasm);

    __ leave();
    __ ret(lr);
  }
  break;
912
// Shared exception-throw shell for ClassCastException.
// Trailing 'true' presumably means the runtime entry takes an argument
// (the failing object) — confirm against generate_exception_throw.
case C1StubId::throw_class_cast_exception_id:
  { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
  }
  break;
918
// Shared exception-throw shell for IncompatibleClassChangeError.
case C1StubId::throw_incompatible_class_change_error_id:
  { StubFrame f(sasm, "throw_incompatible_class_change_error", dont_gc_arguments, does_not_return);
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
  }
  break;
924
// Shared exception-throw shell for IllegalMonitorStateException.
// NOTE(review): unlike the sibling throw stubs above, this StubFrame is
// not marked does_not_return — confirm whether that is intentional.
case C1StubId::throw_illegal_monitor_state_exception_id:
  { StubFrame f(sasm, "throw_illegal_monitor_state_exception", dont_gc_arguments);
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_illegal_monitor_state_exception), false);
  }
  break;
930
// Shared exception-throw shell for IdentityException; the runtime entry
// takes one argument (trailing 'true').
case C1StubId::throw_identity_exception_id:
  { StubFrame f(sasm, "throw_identity_exception", dont_gc_arguments);
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_identity_exception), true);
  }
  break;
936
937 case C1StubId::slow_subtype_check_id:
938 {
939 // Typical calling sequence:
940 // __ push(klass_RInfo); // object klass or other subclass
941 // __ push(sup_k_RInfo); // array element klass or other superclass
942 // __ bl(slow_subtype_check);
943 // Note that the subclass is pushed first, and is therefore deepest.
944 enum layout {
945 r0_off, r0_off_hi,
946 r2_off, r2_off_hi,
947 r4_off, r4_off_hi,
948 r5_off, r5_off_hi,
949 sup_k_off, sup_k_off_hi,
950 klass_off, klass_off_hi,
951 framesize,
952 result_off = sup_k_off
953 };
954
955 __ set_info("slow_subtype_check", dont_gc_arguments);
956 __ push(RegSet::of(r0, r2, r4, r5), sp);
1126 __ leave();
1127 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1128 assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1129
1130 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1131 }
1132 break;
1133
// Notify dtrace of an object allocation.
case C1StubId::dtrace_object_alloc_id:
  { // c_rarg0: object
    StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
    // Save/restore all registers around the probe so the compiled caller's
    // state is untouched; no oop map is recorded for this leaf call.
    save_live_registers(sasm);

    __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);

    restore_live_registers(sasm);
  }
  break;
1144
default:
  // Unhandled stub id: emit a stub that calls unimplemented_entry with
  // the offending id in r0 so the failure is reported at runtime.
  // FIXME: For unhandled trap_id this code fails with assert during vm intialization
  // rather than insert a call to unimplemented_entry
  { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
    __ mov(r0, (int)id);
    __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
  }
  break;
1153 }
1154 }
1155
1156
1157 return oop_maps;
1158 }
1159
1160 #undef __
1161
1162 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); }
|