668 break;
669
case C1StubId::new_instance_id:
case C1StubId::fast_new_instance_id:
case C1StubId::fast_new_instance_init_check_id:
  {
    // Slow-path allocation of a new instance of 'klass'. All three stub
    // ids share the same frame/call/return sequence below; they differ
    // only in the name recorded for the stub.
    Register klass = r3; // Incoming: klass of the instance to allocate
    Register obj = r0; // Result: the newly allocated oop

    if (id == C1StubId::new_instance_id) {
      __ set_info("new_instance", dont_gc_arguments);
    } else if (id == C1StubId::fast_new_instance_id) {
      __ set_info("fast new_instance", dont_gc_arguments);
    } else {
      assert(id == C1StubId::fast_new_instance_init_check_id, "bad C1StubId");
      __ set_info("fast new_instance init check", dont_gc_arguments);
    }

    __ enter();
    // Save live registers and record their frame locations in an OopMap
    // so the GC can find any oops in them during the runtime call.
    OopMap* map = save_live_registers(sasm);
    int call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 now holds the result oop; restore everything else.
    restore_live_registers_except_r0(sasm);
    __ verify_oop(obj);
    __ leave();
    __ ret(lr);

    // r0,: new instance
  }

  break;
700
case C1StubId::counter_overflow_id:
  {
    // Profiling-counter overflow entry: forwards the bci and the Method*
    // to the shared counter_overflow runtime call.
    Register bci = r0, method = r1;
    __ enter();
    OopMap* map = save_live_registers(sasm);
    // Retrieve bci (stack slot written by the caller; assumes this
    // rfp-relative layout matches the caller's store — TODO confirm)
    __ ldrw(bci, Address(rfp, 2*BytesPerWord));
    // And a pointer to the Method*
    __ ldr(method, Address(rfp, 3*BytesPerWord));
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // No result to preserve: restore everything.
    restore_live_registers(sasm);
    __ leave();
    __ ret(lr);
  }
  break;
718
case C1StubId::new_type_array_id:
case C1StubId::new_object_array_id:
  {
    // Slow-path allocation of a one-dimensional array. Both array kinds
    // share this frame/call/return sequence; only the stub name and the
    // runtime entry invoked differ.
    Register length = r19; // Incoming: element count
    Register klass = r3; // Incoming: array klass
    Register obj = r0; // Result: newly allocated array oop

    if (id == C1StubId::new_type_array_id) {
      __ set_info("new_type_array", dont_gc_arguments);
    } else {
      __ set_info("new_object_array", dont_gc_arguments);
    }

#ifdef ASSERT
    // assert object type is really an array of the proper kind
    {
      Label ok;
      Register t0 = obj; // obj (r0) is not live yet, so reuse it as a temp
      __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
      // Extract the array-tag bits of the layout helper for comparison.
      __ asrw(t0, t0, Klass::_lh_array_tag_shift);
      int tag = ((id == C1StubId::new_type_array_id)
                 ? Klass::_lh_array_tag_type_value
                 : Klass::_lh_array_tag_obj_value);
      __ mov(rscratch1, tag);
      __ cmpw(t0, rscratch1);
      __ br(Assembler::EQ, ok);
      __ stop("assert(is an array klass)");
      __ should_not_reach_here();
      __ bind(ok);
    }
#endif // ASSERT

    __ enter();
    // Save live registers and publish their locations for GC.
    OopMap* map = save_live_registers(sasm);
    int call_offset;
    if (id == C1StubId::new_type_array_id) {
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
    } else {
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
    }

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result array; restore everything else.
    restore_live_registers_except_r0(sasm);

    __ verify_oop(obj);
    __ leave();
    __ ret(lr);

    // r0: new array
  }
  break;
771
case C1StubId::new_multi_array_id:
  { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
    // r0,: klass
    // r19,: rank
    // r2: address of 1st dimension
    OopMap* map = save_live_registers(sasm);
    // Shuffle incoming values into the outgoing argument registers.
    // Order matters: r2 is copied into c_rarg3 before c_rarg2 is written
    // (c_rarg2 aliases r2 here — note call_RT below names r1/r2/r3).
    __ mov(c_rarg1, r0);
    __ mov(c_rarg3, r2);
    __ mov(c_rarg2, r19);
    int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result; restore everything else.
    restore_live_registers_except_r0(sasm);

    // r0,: new multi array
    __ verify_oop(r0);
  }
  break;
791
case C1StubId::register_finalizer_id:
  {
    __ set_info("register_finalizer", dont_gc_arguments);

    // This is called via call_runtime so the arguments
    // will be placed in C abi locations

    __ verify_oop(c_rarg0);

    // load the klass and check the has finalizer flag; if the object's
    // class has no finalizer, return immediately (fast path, no frame).
    Label register_finalizer;
    Register t = r5;
    __ load_klass(t, r0); // object is in c_rarg0 (== r0)
    __ ldrb(t, Address(t, Klass::misc_flags_offset()));
    __ tbnz(t, exact_log2(KlassFlags::_misc_has_finalizer), register_finalizer);
    __ ret(lr);

    // Slow path: call the shared runtime to register the finalizable object.
    __ bind(register_finalizer);
    __ enter();
    OopMap* oop_map = save_live_registers(sasm);
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, oop_map);

    // Now restore all the live registers
    restore_live_registers(sasm);

    __ leave();
    __ ret(lr);
  }
  break;
823
case C1StubId::throw_class_cast_exception_id:
  { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
    // 'true': the throw entry takes an argument (presumably the failing
    // object — confirm against generate_exception_throw).
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
  }
  break;
829
830 case C1StubId::throw_incompatible_class_change_error_id:
831 { StubFrame f(sasm, "throw_incompatible_class_cast_exception", dont_gc_arguments, does_not_return);
832 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
833 }
834 break;
835
836 case C1StubId::slow_subtype_check_id:
837 {
838 // Typical calling sequence:
839 // __ push(klass_RInfo); // object klass or other subclass
840 // __ push(sup_k_RInfo); // array element klass or other superclass
841 // __ bl(slow_subtype_check);
842 // Note that the subclass is pushed first, and is therefore deepest.
843 enum layout {
844 r0_off, r0_off_hi,
845 r2_off, r2_off_hi,
846 r4_off, r4_off_hi,
847 r5_off, r5_off_hi,
848 sup_k_off, sup_k_off_hi,
849 klass_off, klass_off_hi,
850 framesize,
851 result_off = sup_k_off
852 };
853
854 __ set_info("slow_subtype_check", dont_gc_arguments);
855 __ push(RegSet::of(r0, r2, r4, r5), sp);
1074 __ leave();
1075 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1076 assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1077
1078 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1079 }
1080 break;
1081
case C1StubId::dtrace_object_alloc_id:
  { // c_rarg0: object
    // Notify dtrace of an object allocation via a leaf call; registers are
    // saved/restored so the allocation site's state is undisturbed.
    StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
    save_live_registers(sasm);

    // The static_cast selects the one-argument overload of dtrace_object_alloc.
    __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);

    restore_live_registers(sasm);
  }
  break;
1092
default:
  { // Unknown stub id: emit a stub that calls unimplemented_entry with the
    // offending id in r0 so the failure is diagnosable at run time.
    StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
    __ mov(r0, (int)id);
    __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
  }
  break;
1099 }
1100 }
1101 return oop_maps;
1102 }
1103
1104 #undef __
1105
1106 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); }
|
668 break;
669
670 case C1StubId::new_instance_id:
671 case C1StubId::fast_new_instance_id:
672 case C1StubId::fast_new_instance_init_check_id:
673 {
674 Register klass = r3; // Incoming
675 Register obj = r0; // Result
676
677 if (id == C1StubId::new_instance_id) {
678 __ set_info("new_instance", dont_gc_arguments);
679 } else if (id == C1StubId::fast_new_instance_id) {
680 __ set_info("fast new_instance", dont_gc_arguments);
681 } else {
682 assert(id == C1StubId::fast_new_instance_init_check_id, "bad C1StubId");
683 __ set_info("fast new_instance init check", dont_gc_arguments);
684 }
685
686 __ enter();
687 OopMap* map = save_live_registers(sasm);
688 int call_offset;
689 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
690 oop_maps = new OopMapSet();
691 oop_maps->add_gc_map(call_offset, map);
692 restore_live_registers_except_r0(sasm);
693 __ verify_oop(obj);
694 __ leave();
695 __ ret(lr);
696
697 // r0,: new instance
698 }
699
700 break;
701
case C1StubId::counter_overflow_id:
  {
    // Profiling-counter overflow entry: forwards the bci and the Method*
    // to the shared counter_overflow runtime call.
    Register bci = r0, method = r1;
    __ enter();
    OopMap* map = save_live_registers(sasm);
    // Retrieve bci (stack slot written by the caller; assumes this
    // rfp-relative layout matches the caller's store — TODO confirm)
    __ ldrw(bci, Address(rfp, 2*BytesPerWord));
    // And a pointer to the Method*
    __ ldr(method, Address(rfp, 3*BytesPerWord));
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // No result to preserve: restore everything.
    restore_live_registers(sasm);
    __ leave();
    __ ret(lr);
  }
  break;
719
case C1StubId::new_type_array_id:
case C1StubId::new_object_array_id:
case C1StubId::new_null_free_array_id:
  {
    // Slow-path allocation of a one-dimensional array: primitive, object,
    // or null-free (inline-type) array. The three kinds share the same
    // frame/call/return sequence; only the stub name and runtime entry differ.
    Register length = r19; // Incoming: element count
    Register klass = r3; // Incoming: array klass
    Register obj = r0; // Result: newly allocated array oop

    if (id == C1StubId::new_type_array_id) {
      __ set_info("new_type_array", dont_gc_arguments);
    } else if (id == C1StubId::new_object_array_id) {
      __ set_info("new_object_array", dont_gc_arguments);
    } else {
      __ set_info("new_null_free_array", dont_gc_arguments);
    }

#ifdef ASSERT
    // assert object type is really an array of the proper kind
    {
      Label ok;
      Register t0 = obj; // obj (r0) is not live yet, so reuse it as a temp
      __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
      // Extract the array-tag bits of the layout helper for comparison.
      __ asrw(t0, t0, Klass::_lh_array_tag_shift);
      switch (id) {
      case C1StubId::new_type_array_id:
        __ cmpw(t0, Klass::_lh_array_tag_type_value);
        __ br(Assembler::EQ, ok);
        __ stop("assert(is a type array klass)");
        break;
      case C1StubId::new_object_array_id:
        // An object array klass may carry either the reference or the
        // flat tag, so accept both.
        __ cmpw(t0, Klass::_lh_array_tag_ref_value); // new "[Ljava/lang/Object;"
        __ br(Assembler::EQ, ok);
        __ cmpw(t0, Klass::_lh_array_tag_flat_value); // new "[LVT;"
        __ br(Assembler::EQ, ok);
        __ stop("assert(is an object or inline type array klass)");
        break;
      case C1StubId::new_null_free_array_id:
        __ cmpw(t0, Klass::_lh_array_tag_flat_value); // the array can be a flat array.
        __ br(Assembler::EQ, ok);
        __ cmpw(t0, Klass::_lh_array_tag_ref_value); // the array cannot be a flat array (due to the InlineArrayElementMaxFlatSize, etc.)
        __ br(Assembler::EQ, ok);
        // NOTE(review): this message duplicates the object-array branch;
        // a null-free-specific message would aid debugging.
        __ stop("assert(is an object or inline type array klass)");
        break;
      default: ShouldNotReachHere();
      }
      __ should_not_reach_here();
      __ bind(ok);
    }
#endif // ASSERT

    __ enter();
    // Save live registers and publish their locations for GC.
    OopMap* map = save_live_registers(sasm);
    int call_offset;
    if (id == C1StubId::new_type_array_id) {
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
    } else if (id == C1StubId::new_object_array_id) {
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
    } else {
      assert(id == C1StubId::new_null_free_array_id, "must be");
      call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_null_free_array), klass, length);
    }

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result array; restore everything else.
    restore_live_registers_except_r0(sasm);

    __ verify_oop(obj);
    __ leave();
    __ ret(lr);

    // r0: new array
  }
  break;
793
case C1StubId::new_multi_array_id:
  { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
    // r0,: klass
    // r19,: rank
    // r2: address of 1st dimension
    OopMap* map = save_live_registers(sasm);
    // Shuffle incoming values into the outgoing argument registers.
    // Order matters: r2 is copied into c_rarg3 before c_rarg2 is written
    // (c_rarg2 aliases r2 here — note call_RT below names r1/r2/r3).
    __ mov(c_rarg1, r0);
    __ mov(c_rarg3, r2);
    __ mov(c_rarg2, r19);
    int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 holds the result; restore everything else.
    restore_live_registers_except_r0(sasm);

    // r0,: new multi array
    __ verify_oop(r0);
  }
  break;
813
case C1StubId::buffer_inline_args_id:
case C1StubId::buffer_inline_args_no_receiver_id:
  {
    // Two stub variants differing only in the recorded name and the
    // runtime entry called (with vs. without the receiver).
    const char* name = (id == C1StubId::buffer_inline_args_id) ?
      "buffer_inline_args" : "buffer_inline_args_no_receiver";
    StubFrame f(sasm, name, dont_gc_arguments);
    OopMap* map = save_live_registers(sasm);
    Register method = r19; // Incoming
    address entry = (id == C1StubId::buffer_inline_args_id) ?
      CAST_FROM_FN_PTR(address, buffer_inline_args) :
      CAST_FROM_FN_PTR(address, buffer_inline_args_no_receiver);
    // This is called from a C1 method's scalarized entry point
    // where r0-r7 may be holding live argument values so we can't
    // return the result in r0 as the other stubs do. LR is used as
    // a temporary below to avoid the result being clobbered by
    // restore_live_registers. It's saved and restored by
    // StubAssembler::prologue and epilogue anyway.
    int call_offset = __ call_RT(lr, noreg, entry, method);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    restore_live_registers(sasm);
    // Move the result out of lr before the epilogue restores it.
    __ mov(r20, lr);
    __ verify_oop(r20); // r20: an array of buffered value objects
  }
  break;
839
case C1StubId::load_flat_array_id:
  {
    // Load (and buffer) an element from a flat array via the runtime.
    StubFrame f(sasm, "load_flat_array", dont_gc_arguments);
    OopMap* map = save_live_registers(sasm);

    // Called with store_parameter and not C abi

    f.load_argument(1, r0); // r0,: array
    f.load_argument(0, r1); // r1,: index
    int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, load_flat_array), r0, r1);

    // Ensure the stores that initialize the buffer are visible
    // before any subsequent store that publishes this reference.
    __ membar(Assembler::StoreStore);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // r0 carries the loaded element; restore everything else.
    restore_live_registers_except_r0(sasm);

    // r0: loaded element at array[index]
    __ verify_oop(r0);
  }
  break;
863
case C1StubId::store_flat_array_id:
  {
    // Store a value into a flat array via the runtime.
    StubFrame f(sasm, "store_flat_array", dont_gc_arguments);
    // The extra argument (4) presumably reserves additional frame slots —
    // TODO confirm against save_live_registers' signature.
    OopMap* map = save_live_registers(sasm, 4);

    // Called with store_parameter and not C abi

    f.load_argument(2, r0); // r0: array
    f.load_argument(1, r1); // r1: index
    f.load_argument(0, r2); // r2: value
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, store_flat_array), r0, r1, r2);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // NOTE(review): the call produces no result in r0, yet r0 is excluded
    // from the restore; verify restore_live_registers(sasm) wasn't intended.
    restore_live_registers_except_r0(sasm);
  }
  break;
881
case C1StubId::substitutability_check_id:
  {
    // Ask the runtime whether two inline-type operands are substitutable.
    StubFrame f(sasm, "substitutability_check", dont_gc_arguments);
    OopMap* map = save_live_registers(sasm);

    // Called with store_parameter and not C abi

    f.load_argument(1, r1); // r1,: left
    f.load_argument(0, r2); // r2,: right
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, substitutability_check), r1, r2);

    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, map);
    // The (non-oop) boolean result is left in r0 by the call, so r0 is
    // excluded from the restore to keep it live for the caller.
    restore_live_registers_except_r0(sasm);

    // r0,: are the two operands substitutable
  }
  break;
900
case C1StubId::register_finalizer_id:
  {
    __ set_info("register_finalizer", dont_gc_arguments);

    // This is called via call_runtime so the arguments
    // will be placed in C abi locations

    __ verify_oop(c_rarg0);

    // load the klass and check the has finalizer flag; if the object's
    // class has no finalizer, return immediately (fast path, no frame).
    Label register_finalizer;
    Register t = r5;
    __ load_klass(t, r0); // object is in c_rarg0 (== r0)
    __ ldrb(t, Address(t, Klass::misc_flags_offset()));
    __ tbnz(t, exact_log2(KlassFlags::_misc_has_finalizer), register_finalizer);
    __ ret(lr);

    // Slow path: call the shared runtime to register the finalizable object.
    __ bind(register_finalizer);
    __ enter();
    OopMap* oop_map = save_live_registers(sasm);
    int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
    oop_maps = new OopMapSet();
    oop_maps->add_gc_map(call_offset, oop_map);

    // Now restore all the live registers
    restore_live_registers(sasm);

    __ leave();
    __ ret(lr);
  }
  break;
932
case C1StubId::throw_class_cast_exception_id:
  { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
    // 'true': the throw entry takes an argument (presumably the failing
    // object — confirm against generate_exception_throw).
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
  }
  break;
938
case C1StubId::throw_incompatible_class_change_error_id:
  { StubFrame f(sasm, "throw_incompatible_class_change_error", dont_gc_arguments, does_not_return);
    // 'false': this throw entry takes no argument.
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
  }
  break;
944
case C1StubId::throw_illegal_monitor_state_exception_id:
  { // NOTE(review): sibling throw stubs pass does_not_return to StubFrame;
    // confirm whether its omission here is intentional.
    StubFrame f(sasm, "throw_illegal_monitor_state_exception", dont_gc_arguments);
    // 'false': this throw entry takes no argument.
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_illegal_monitor_state_exception), false);
  }
  break;
950
case C1StubId::throw_identity_exception_id:
  { // NOTE(review): sibling throw stubs pass does_not_return to StubFrame;
    // confirm whether its omission here is intentional.
    StubFrame f(sasm, "throw_identity_exception", dont_gc_arguments);
    // 'true': the throw entry takes an argument (presumably the offending
    // object — confirm against generate_exception_throw).
    oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_identity_exception), true);
  }
  break;
956
957 case C1StubId::slow_subtype_check_id:
958 {
959 // Typical calling sequence:
960 // __ push(klass_RInfo); // object klass or other subclass
961 // __ push(sup_k_RInfo); // array element klass or other superclass
962 // __ bl(slow_subtype_check);
963 // Note that the subclass is pushed first, and is therefore deepest.
964 enum layout {
965 r0_off, r0_off_hi,
966 r2_off, r2_off_hi,
967 r4_off, r4_off_hi,
968 r5_off, r5_off_hi,
969 sup_k_off, sup_k_off_hi,
970 klass_off, klass_off_hi,
971 framesize,
972 result_off = sup_k_off
973 };
974
975 __ set_info("slow_subtype_check", dont_gc_arguments);
976 __ push(RegSet::of(r0, r2, r4, r5), sp);
1195 __ leave();
1196 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1197 assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1198
1199 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1200 }
1201 break;
1202
case C1StubId::dtrace_object_alloc_id:
  { // c_rarg0: object
    // Notify dtrace of an object allocation via a leaf call; registers are
    // saved/restored so the allocation site's state is undisturbed.
    StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
    save_live_registers(sasm);

    // The static_cast selects the one-argument overload of dtrace_object_alloc.
    __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);

    restore_live_registers(sasm);
  }
  break;
1213
default:
  // FIXME: For unhandled trap_id this code fails with assert during vm intialization
  // rather than insert a call to unimplemented_entry
  { // Unknown stub id: emit a stub that calls unimplemented_entry with the
    // offending id in r0 so the failure is diagnosable at run time.
    StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
    __ mov(r0, (int)id);
    __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
  }
  break;
1222 }
1223 }
1224
1225
1226 return oop_maps;
1227 }
1228
1229 #undef __
1230
1231 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); }
|