668 break;
669
    case C1StubId::new_instance_id:
    case C1StubId::fast_new_instance_id:
    case C1StubId::fast_new_instance_init_check_id:
      {
        // Slow-path allocation of a new instance of 'klass'.  All three
        // stub ids share this code path and differ only in the debug name.
        Register klass = r3; // Incoming
        Register obj = r0; // Result

        if (id == C1StubId::new_instance_id) {
          __ set_info("new_instance", dont_gc_arguments);
        } else if (id == C1StubId::fast_new_instance_id) {
          __ set_info("fast new_instance", dont_gc_arguments);
        } else {
          assert(id == C1StubId::fast_new_instance_init_check_id, "bad C1StubId");
          __ set_info("fast new_instance init check", dont_gc_arguments);
        }

        __ enter();
        // Record an oop map at the call so the GC can locate live values
        // across the runtime call.
        OopMap* map = save_live_registers(sasm);
        int call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        // Keep r0 (the freshly allocated instance) live across the restore.
        restore_live_registers_except_r0(sasm);
        __ verify_oop(obj);
        __ leave();
        __ ret(lr);

        // r0,: new instance
      }

      break;
700
    case C1StubId::counter_overflow_id:
      {
        // Invocation/backedge counter overflowed: hand bci and Method* to
        // the runtime so it can trigger compilation policy decisions.
        Register bci = r0, method = r1;
        __ enter();
        OopMap* map = save_live_registers(sasm);
        // Retrieve bci (passed on the stack by the caller, above rfp)
        __ ldrw(bci, Address(rfp, 2*BytesPerWord));
        // And a pointer to the Method*
        __ ldr(method, Address(rfp, 3*BytesPerWord));
        int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        restore_live_registers(sasm);
        __ leave();
        __ ret(lr);
      }
      break;
718
    case C1StubId::new_type_array_id:
    case C1StubId::new_object_array_id:
      {
        // Slow-path allocation of a one-dimensional array of the given
        // klass and length.
        Register length = r19; // Incoming
        Register klass = r3; // Incoming
        Register obj = r0; // Result

        if (id == C1StubId::new_type_array_id) {
          __ set_info("new_type_array", dont_gc_arguments);
        } else {
          __ set_info("new_object_array", dont_gc_arguments);
        }

#ifdef ASSERT
        // assert object type is really an array of the proper kind
        {
          Label ok;
          Register t0 = obj; // result register is not live yet; reuse as temp
          // Extract the array tag from the klass layout helper and compare
          // it with the tag expected for this stub id.
          __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
          __ asrw(t0, t0, Klass::_lh_array_tag_shift);
          int tag = ((id == C1StubId::new_type_array_id)
                     ? Klass::_lh_array_tag_type_value
                     : Klass::_lh_array_tag_obj_value);
          __ mov(rscratch1, tag);
          __ cmpw(t0, rscratch1);
          __ br(Assembler::EQ, ok);
          __ stop("assert(is an array klass)");
          __ should_not_reach_here();
          __ bind(ok);
        }
#endif // ASSERT

        __ enter();
        OopMap* map = save_live_registers(sasm);
        int call_offset;
        if (id == C1StubId::new_type_array_id) {
          call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
        } else {
          call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
        }

        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        // Keep r0 (the new array) live across the restore.
        restore_live_registers_except_r0(sasm);

        __ verify_oop(obj);
        __ leave();
        __ ret(lr);

        // r0: new array
      }
      break;
771
    case C1StubId::new_multi_array_id:
      { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
        // r0,: klass
        // r19,: rank
        // r2: address of 1st dimension
        OopMap* map = save_live_registers(sasm);
        // Shuffle incoming values into the C ABI argument registers.
        // Order matters: r2 must be copied into c_rarg3 before c_rarg2
        // (r2 on AArch64) is overwritten with the rank.
        __ mov(c_rarg1, r0);
        __ mov(c_rarg3, r2);
        __ mov(c_rarg2, r19);
        int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);

        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        // Keep r0 (the new array) live across the restore.
        restore_live_registers_except_r0(sasm);

        // r0,: new multi array
        __ verify_oop(r0);
      }
      break;
791
    case C1StubId::register_finalizer_id:
      {
        __ set_info("register_finalizer", dont_gc_arguments);

        // This is called via call_runtime so the arguments
        // will be place in C abi locations

        __ verify_oop(c_rarg0);

        // load the klass and check the has finalizer flag
        Label register_finalizer;
        Register t = r5;
        __ load_klass(t, r0);
        __ ldrb(t, Address(t, Klass::misc_flags_offset()));
        // Fast path: return immediately if the klass has no finalizer.
        __ tbnz(t, exact_log2(KlassFlags::_misc_has_finalizer), register_finalizer);
        __ ret(lr);

        // Slow path: call into the runtime to register r0 for finalization.
        __ bind(register_finalizer);
        __ enter();
        OopMap* oop_map = save_live_registers(sasm);
        int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, oop_map);

        // Now restore all the live registers
        restore_live_registers(sasm);

        __ leave();
        __ ret(lr);
      }
      break;
823
    case C1StubId::throw_class_cast_exception_id:
      { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
        // NOTE(review): the trailing 'true' presumably means the runtime
        // entry takes the failing object as an argument — confirm against
        // generate_exception_throw.
        oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
      }
      break;
829
830 case C1StubId::throw_incompatible_class_change_error_id:
831 { StubFrame f(sasm, "throw_incompatible_class_cast_exception", dont_gc_arguments, does_not_return);
832 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
833 }
834 break;
835
836 case C1StubId::slow_subtype_check_id:
837 {
838 // Typical calling sequence:
839 // __ push(klass_RInfo); // object klass or other subclass
840 // __ push(sup_k_RInfo); // array element klass or other superclass
841 // __ bl(slow_subtype_check);
842 // Note that the subclass is pushed first, and is therefore deepest.
843 enum layout {
844 r0_off, r0_off_hi,
845 r2_off, r2_off_hi,
846 r4_off, r4_off_hi,
847 r5_off, r5_off_hi,
848 sup_k_off, sup_k_off_hi,
849 klass_off, klass_off_hi,
850 framesize,
851 result_off = sup_k_off
852 };
853
854 __ set_info("slow_subtype_check", dont_gc_arguments);
855 __ push(RegSet::of(r0, r2, r4, r5), sp);
1074 __ leave();
1075 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1076 assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1077
1078 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1079 }
1080 break;
1081
    case C1StubId::dtrace_object_alloc_id:
      { // c_rarg0: object
        StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
        // Save everything: the tracing hook must not disturb program state.
        save_live_registers(sasm);

        // Leaf call — no oop map is recorded for it.
        __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);

        restore_live_registers(sasm);
      }
      break;
1092
    default:
      // Unknown stub id: emit a stub that passes the id to
      // unimplemented_entry and does not return.
      { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
        __ mov(r0, (int)id);
        __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
      }
      break;
1099 }
1100 }
1101 return oop_maps;
1102 }
1103
1104 #undef __
1105
1106 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); }
|
668 break;
669
670 case C1StubId::new_instance_id:
671 case C1StubId::fast_new_instance_id:
672 case C1StubId::fast_new_instance_init_check_id:
673 {
674 Register klass = r3; // Incoming
675 Register obj = r0; // Result
676
677 if (id == C1StubId::new_instance_id) {
678 __ set_info("new_instance", dont_gc_arguments);
679 } else if (id == C1StubId::fast_new_instance_id) {
680 __ set_info("fast new_instance", dont_gc_arguments);
681 } else {
682 assert(id == C1StubId::fast_new_instance_init_check_id, "bad C1StubId");
683 __ set_info("fast new_instance init check", dont_gc_arguments);
684 }
685
686 __ enter();
687 OopMap* map = save_live_registers(sasm);
688 int call_offset;
689 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
690 oop_maps = new OopMapSet();
691 oop_maps->add_gc_map(call_offset, map);
692 restore_live_registers_except_r0(sasm);
693 __ verify_oop(obj);
694 __ leave();
695 __ ret(lr);
696
697 // r0,: new instance
698 }
699
700 break;
701
    case C1StubId::counter_overflow_id:
      {
        // Invocation/backedge counter overflowed: hand bci and Method* to
        // the runtime so it can trigger compilation policy decisions.
        Register bci = r0, method = r1;
        __ enter();
        OopMap* map = save_live_registers(sasm);
        // Retrieve bci (passed on the stack by the caller, above rfp)
        __ ldrw(bci, Address(rfp, 2*BytesPerWord));
        // And a pointer to the Method*
        __ ldr(method, Address(rfp, 3*BytesPerWord));
        int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        restore_live_registers(sasm);
        __ leave();
        __ ret(lr);
      }
      break;
719
    case C1StubId::new_type_array_id:
    case C1StubId::new_object_array_id:
    case C1StubId::new_null_free_array_id:
      {
        // Slow-path allocation of a one-dimensional array (primitive,
        // object, or null-free/flat) of the given klass and length.
        Register length = r19; // Incoming
        Register klass = r3; // Incoming
        Register obj = r0; // Result

        if (id == C1StubId::new_type_array_id) {
          __ set_info("new_type_array", dont_gc_arguments);
        } else if (id == C1StubId::new_object_array_id) {
          __ set_info("new_object_array", dont_gc_arguments);
        } else {
          __ set_info("new_null_free_array", dont_gc_arguments);
        }

#ifdef ASSERT
        // assert object type is really an array of the proper kind
        {
          Label ok;
          Register t0 = obj; // result register is not live yet; reuse as temp
          // Extract the array tag from the klass layout helper and check it
          // against the tag(s) legal for this stub id.
          __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
          __ asrw(t0, t0, Klass::_lh_array_tag_shift);
          switch (id) {
          case C1StubId::new_type_array_id:
            __ cmpw(t0, Klass::_lh_array_tag_type_value);
            __ br(Assembler::EQ, ok);
            __ stop("assert(is a type array klass)");
            break;
          case C1StubId::new_object_array_id:
            __ cmpw(t0, Klass::_lh_array_tag_obj_value); // new "[Ljava/lang/Object;"
            __ br(Assembler::EQ, ok);
            __ cmpw(t0, Klass::_lh_array_tag_vt_value); // new "[LVT;"
            __ br(Assembler::EQ, ok);
            __ stop("assert(is an object or inline type array klass)");
            break;
          case C1StubId::new_null_free_array_id:
            __ cmpw(t0, Klass::_lh_array_tag_vt_value); // the array can be a flat array.
            __ br(Assembler::EQ, ok);
            __ cmpw(t0, Klass::_lh_array_tag_obj_value); // the array cannot be a flat array (due to InlineArrayElementMaxFlatSize, etc)
            __ br(Assembler::EQ, ok);
            __ stop("assert(is an object or inline type array klass)");
            break;
          default: ShouldNotReachHere();
          }
          __ should_not_reach_here();
          __ bind(ok);
        }
#endif // ASSERT

        __ enter();
        OopMap* map = save_live_registers(sasm);
        int call_offset;
        if (id == C1StubId::new_type_array_id) {
          call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
        } else if (id == C1StubId::new_object_array_id) {
          call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
        } else {
          assert(id == C1StubId::new_null_free_array_id, "must be");
          call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_null_free_array), klass, length);
        }

        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        // Keep r0 (the new array) live across the restore.
        restore_live_registers_except_r0(sasm);

        __ verify_oop(obj);
        __ leave();
        __ ret(lr);

        // r0: new array
      }
      break;
793
    case C1StubId::new_multi_array_id:
      { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
        // r0,: klass
        // r19,: rank
        // r2: address of 1st dimension
        OopMap* map = save_live_registers(sasm);
        // Shuffle incoming values into the C ABI argument registers.
        // Order matters: r2 must be copied into c_rarg3 before c_rarg2
        // (r2 on AArch64) is overwritten with the rank.
        __ mov(c_rarg1, r0);
        __ mov(c_rarg3, r2);
        __ mov(c_rarg2, r19);
        int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);

        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        // Keep r0 (the new array) live across the restore.
        restore_live_registers_except_r0(sasm);

        // r0,: new multi array
        __ verify_oop(r0);
      }
      break;
813
    case C1StubId::buffer_inline_args_id:
    case C1StubId::buffer_inline_args_no_receiver_id:
      {
        // Buffer scalarized inline-type arguments into heap objects; the
        // two ids differ only in whether the receiver is included.
        const char* name = (id == C1StubId::buffer_inline_args_id) ?
          "buffer_inline_args" : "buffer_inline_args_no_receiver";
        StubFrame f(sasm, name, dont_gc_arguments);
        OopMap* map = save_live_registers(sasm);
        Register method = r19; // Incoming
        address entry = (id == C1StubId::buffer_inline_args_id) ?
          CAST_FROM_FN_PTR(address, buffer_inline_args) :
          CAST_FROM_FN_PTR(address, buffer_inline_args_no_receiver);
        // This is called from a C1 method's scalarized entry point
        // where r0-r7 may be holding live argument values so we can't
        // return the result in r0 as the other stubs do. LR is used as
        // a temporary below to avoid the result being clobbered by
        // restore_live_registers.
        int call_offset = __ call_RT(lr, noreg, entry, method);
        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        restore_live_registers(sasm);
        // Move the result out of lr into a regular register for the caller.
        __ mov(r20, lr);
        __ verify_oop(r20); // r20: an array of buffered value objects
      }
      break;
838
    case C1StubId::load_flat_array_id:
      {
        // Load (and buffer) an element from a flat array.
        StubFrame f(sasm, "load_flat_array", dont_gc_arguments);
        OopMap* map = save_live_registers(sasm);

        // Called with store_parameter and not C abi

        f.load_argument(1, r0); // r0,: array
        f.load_argument(0, r1); // r1,: index
        int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, load_flat_array), r0, r1);

        // Ensure the stores that initialize the buffer are visible
        // before any subsequent store that publishes this reference.
        __ membar(Assembler::StoreStore);

        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        // Keep r0 (the buffered element) live across the restore.
        restore_live_registers_except_r0(sasm);

        // r0: loaded element at array[index]
        __ verify_oop(r0);
      }
      break;
862
    case C1StubId::store_flat_array_id:
      {
        // Store a value into a flat array element.
        StubFrame f(sasm, "store_flat_array", dont_gc_arguments);
        // NOTE(review): the extra '4' presumably reserves additional stack
        // slots — confirm against save_live_registers' signature.
        OopMap* map = save_live_registers(sasm, 4);

        // Called with store_parameter and not C abi

        f.load_argument(2, r0); // r0: array
        f.load_argument(1, r1); // r1: index
        f.load_argument(0, r2); // r2: value
        int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, store_flat_array), r0, r1, r2);

        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        // NOTE(review): the call produces no result (noreg) yet r0 is
        // excluded from the restore — looks intentional but worth confirming.
        restore_live_registers_except_r0(sasm);
      }
      break;
880
    case C1StubId::substitutability_check_id:
      {
        // Runtime check whether two value objects are substitutable.
        StubFrame f(sasm, "substitutability_check", dont_gc_arguments);
        OopMap* map = save_live_registers(sasm);

        // Called with store_parameter and not C abi

        f.load_argument(1, r1); // r1,: left
        f.load_argument(0, r2); // r2,: right
        int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, substitutability_check), r1, r2);

        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, map);
        // r0 carries the answer back to the caller, so don't restore it.
        restore_live_registers_except_r0(sasm);

        // r0,: are the two operands substitutable
      }
      break;
899
    case C1StubId::register_finalizer_id:
      {
        __ set_info("register_finalizer", dont_gc_arguments);

        // This is called via call_runtime so the arguments
        // will be place in C abi locations

        __ verify_oop(c_rarg0);

        // load the klass and check the has finalizer flag
        Label register_finalizer;
        Register t = r5;
        __ load_klass(t, r0);
        __ ldrb(t, Address(t, Klass::misc_flags_offset()));
        // Fast path: return immediately if the klass has no finalizer.
        __ tbnz(t, exact_log2(KlassFlags::_misc_has_finalizer), register_finalizer);
        __ ret(lr);

        // Slow path: call into the runtime to register r0 for finalization.
        __ bind(register_finalizer);
        __ enter();
        OopMap* oop_map = save_live_registers(sasm);
        int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
        oop_maps = new OopMapSet();
        oop_maps->add_gc_map(call_offset, oop_map);

        // Now restore all the live registers
        restore_live_registers(sasm);

        __ leave();
        __ ret(lr);
      }
      break;
931
    case C1StubId::throw_class_cast_exception_id:
      { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
        // NOTE(review): the trailing 'true' presumably means the runtime
        // entry takes the failing object as an argument — confirm against
        // generate_exception_throw.
        oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
      }
      break;
937
    case C1StubId::throw_incompatible_class_change_error_id:
      { // Throws IncompatibleClassChangeError; 'false' => no argument.
        StubFrame f(sasm, "throw_incompatible_class_change_error", dont_gc_arguments, does_not_return);
        oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
      }
      break;
943
    case C1StubId::throw_illegal_monitor_state_exception_id:
      { // NOTE(review): unlike the other throw_* stubs above, this frame
        // is not marked does_not_return — confirm whether that is intended.
        StubFrame f(sasm, "throw_illegal_monitor_state_exception", dont_gc_arguments);
        oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_illegal_monitor_state_exception), false);
      }
      break;
949
    case C1StubId::throw_identity_exception_id:
      { // NOTE(review): like the case above, this frame lacks the
        // does_not_return flag used by the other throw_* stubs — confirm.
        StubFrame f(sasm, "throw_identity_exception", dont_gc_arguments);
        oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_identity_exception), true);
      }
      break;
955
956 case C1StubId::slow_subtype_check_id:
957 {
958 // Typical calling sequence:
959 // __ push(klass_RInfo); // object klass or other subclass
960 // __ push(sup_k_RInfo); // array element klass or other superclass
961 // __ bl(slow_subtype_check);
962 // Note that the subclass is pushed first, and is therefore deepest.
963 enum layout {
964 r0_off, r0_off_hi,
965 r2_off, r2_off_hi,
966 r4_off, r4_off_hi,
967 r5_off, r5_off_hi,
968 sup_k_off, sup_k_off_hi,
969 klass_off, klass_off_hi,
970 framesize,
971 result_off = sup_k_off
972 };
973
974 __ set_info("slow_subtype_check", dont_gc_arguments);
975 __ push(RegSet::of(r0, r2, r4, r5), sp);
1194 __ leave();
1195 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1196 assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1197
1198 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1199 }
1200 break;
1201
    case C1StubId::dtrace_object_alloc_id:
      { // c_rarg0: object
        StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
        // Save everything: the tracing hook must not disturb program state.
        save_live_registers(sasm);

        // Leaf call — no oop map is recorded for it.
        __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);

        restore_live_registers(sasm);
      }
      break;
1212
    default:
      // Unknown stub id: emit a stub that passes the id to
      // unimplemented_entry and does not return.
      // FIXME: For unhandled trap_id this code fails with assert during vm intialization
      // rather than insert a call to unimplemented_entry
      { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
        __ mov(r0, (int)id);
        __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
      }
      break;
1221 }
1222 }
1223
1224
1225 return oop_maps;
1226 }
1227
1228 #undef __
1229
1230 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); }
|