666 break;
667
// Slow-path instance allocation. All three stub ids share one body and
// differ only in the debug name recorded via set_info(). klass arrives in
// r3; the runtime call new_instance(klass) leaves the new oop in r0, which
// is deliberately NOT restored (restore_live_registers_except_r0) so the
// result survives back to compiled code.
668 case C1StubId::new_instance_id:
669 case C1StubId::fast_new_instance_id:
670 case C1StubId::fast_new_instance_init_check_id:
671 {
672 Register klass = r3; // Incoming
673 Register obj = r0; // Result
674
675 if (id == C1StubId::new_instance_id) {
676 __ set_info("new_instance", dont_gc_arguments);
677 } else if (id == C1StubId::fast_new_instance_id) {
678 __ set_info("fast new_instance", dont_gc_arguments);
679 } else {
680 assert(id == C1StubId::fast_new_instance_init_check_id, "bad C1StubId");
681 __ set_info("fast new_instance init check", dont_gc_arguments);
682 }
683
// Build a frame, save caller-saved state, call into the VM, and record
// an oop map at the call site so GC can walk the saved registers.
684 __ enter();
685 OopMap* map = save_live_registers(sasm);
686 int call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
687 oop_maps = new OopMapSet();
688 oop_maps->add_gc_map(call_offset, map);
689 restore_live_registers_except_r0(sasm);
690 __ verify_oop(obj);
691 __ leave();
692 __ ret(lr);
693
694 // r0,: new instance
695 }
696
697 break;
698
// Invocation/backedge counter overflow: fetch the bci and the Method*
// that the caller spilled into its frame (slots at rfp + 2 and 3 words;
// this layout must match the C1 call site that pushes them), then call
// the counter_overflow runtime entry. No result; all registers restored.
699 case C1StubId::counter_overflow_id:
700 {
701 Register bci = r0, method = r1;
702 __ enter();
703 OopMap* map = save_live_registers(sasm);
704 // Retrieve bci
705 __ ldrw(bci, Address(rfp, 2*BytesPerWord));
706 // And a pointer to the Method*
707 __ ldr(method, Address(rfp, 3*BytesPerWord));
708 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
709 oop_maps = new OopMapSet();
710 oop_maps->add_gc_map(call_offset, map);
711 restore_live_registers(sasm);
712 __ leave();
713 __ ret(lr);
714 }
715 break;
716
// Slow-path array allocation. length arrives in r19, klass in r3; the
// new array oop is returned in r0 (not restored, like new_instance).
717 case C1StubId::new_type_array_id:
718 case C1StubId::new_object_array_id:
719 {
720 Register length = r19; // Incoming
721 Register klass = r3; // Incoming
722 Register obj = r0; // Result
723
724 if (id == C1StubId::new_type_array_id) {
725 __ set_info("new_type_array", dont_gc_arguments);
726 } else {
727 __ set_info("new_object_array", dont_gc_arguments);
728 }
729
730 #ifdef ASSERT
731 // assert object type is really an array of the proper kind
732 {
733 Label ok;
// t0 aliases obj/r0 — safe to clobber here because r0 is only
// defined later by the runtime call.
734 Register t0 = obj;
735 __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
736 __ asrw(t0, t0, Klass::_lh_array_tag_shift);
737 int tag = ((id == C1StubId::new_type_array_id)
738 ? Klass::_lh_array_tag_type_value
739 : Klass::_lh_array_tag_obj_value);
740 __ mov(rscratch1, tag);
741 __ cmpw(t0, rscratch1);
742 __ br(Assembler::EQ, ok);
743 __ stop("assert(is an array klass)");
744 __ should_not_reach_here();
745 __ bind(ok);
746 }
747 #endif // ASSERT
748
749 __ enter();
750 OopMap* map = save_live_registers(sasm);
751 int call_offset;
752 if (id == C1StubId::new_type_array_id) {
753 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
754 } else {
755 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
756 }
757
758 oop_maps = new OopMapSet();
759 oop_maps->add_gc_map(call_offset, map);
760 restore_live_registers_except_r0(sasm);
761
762 __ verify_oop(obj);
763 __ leave();
764 __ ret(lr);
765
766 // r0: new array
767 }
768 break;
769
// Multi-dimensional array allocation. Incoming klass/rank/dims are
// shuffled into the runtime-call argument registers r1..r3.
// NOTE: r2 is copied to c_rarg3 BEFORE c_rarg2 is written — on AArch64
// c_rarg2 presumably aliases r2 (AAPCS64), so this ordering avoids
// clobbering the dims pointer; confirm against the register definitions.
770 case C1StubId::new_multi_array_id:
771 { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
772 // r0,: klass
773 // r19,: rank
774 // r2: address of 1st dimension
775 OopMap* map = save_live_registers(sasm);
776 __ mov(c_rarg1, r0);
777 __ mov(c_rarg3, r2);
778 __ mov(c_rarg2, r19);
779 int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);
780
781 oop_maps = new OopMapSet();
782 oop_maps->add_gc_map(call_offset, map);
783 restore_live_registers_except_r0(sasm);
784
785 // r0,: new multi array
786 __ verify_oop(r0);
787 }
788 break;
789
// Register a finalizable object with the runtime. Fast path: test the
// has_finalizer bit in the klass misc_flags and return immediately if it
// is clear; only finalizable objects take the runtime call.
790 case C1StubId::register_finalizer_id:
791 {
792 __ set_info("register_finalizer", dont_gc_arguments);
793
794 // This is called via call_runtime so the arguments
795 // will be placed in C abi locations
796
797 __ verify_oop(c_rarg0);
798
799 // load the klass and check the has finalizer flag
800 Label register_finalizer;
801 Register t = r5;
802 __ load_klass(t, r0);
803 __ ldrb(t, Address(t, Klass::misc_flags_offset()));
804 __ tbnz(t, exact_log2(KlassFlags::_misc_has_finalizer), register_finalizer);
805 __ ret(lr);
806
// Slow path: full frame + oop map around SharedRuntime::register_finalizer.
807 __ bind(register_finalizer);
808 __ enter();
809 OopMap* oop_map = save_live_registers(sasm);
810 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
811 oop_maps = new OopMapSet();
812 oop_maps->add_gc_map(call_offset, oop_map);
813
814 // Now restore all the live registers
815 restore_live_registers(sasm);
816
817 __ leave();
818 __ ret(lr);
819 }
820 break;
821
// ClassCastException thrower. The trailing 'true' tells
// generate_exception_throw the exception takes an argument
// (the failing object) — confirm against the shared Runtime1 code.
822 case C1StubId::throw_class_cast_exception_id:
823 { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
824 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
825 }
826 break;
827
// IncompatibleClassChangeError thrower (no argument — 'false').
// Fix: the stub's debug name previously read
// "throw_incompatible_class_cast_exception", a copy-paste mix-up of the
// class-cast case; it now matches the stub id and the target routine.
828 case C1StubId::throw_incompatible_class_change_error_id:
829 { StubFrame f(sasm, "throw_incompatible_class_change_error", dont_gc_arguments, does_not_return);
830 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
831 }
832 break;
833
834 case C1StubId::slow_subtype_check_id:
835 {
836 // Typical calling sequence:
837 // __ push(klass_RInfo); // object klass or other subclass
838 // __ push(sup_k_RInfo); // array element klass or other superclass
839 // __ bl(slow_subtype_check);
840 // Note that the subclass is pushed first, and is therefore deepest.
841 enum layout {
842 r0_off, r0_off_hi,
843 r2_off, r2_off_hi,
844 r4_off, r4_off_hi,
845 r5_off, r5_off_hi,
846 sup_k_off, sup_k_off_hi,
847 klass_off, klass_off_hi,
848 framesize,
849 result_off = sup_k_off
850 };
851
852 __ set_info("slow_subtype_check", dont_gc_arguments);
853 __ push(RegSet::of(r0, r2, r4, r5), sp);
1023 __ leave();
1024 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1025 assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1026
1027 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1028 }
1029 break;
1030
// DTrace object-allocation probe: leaf call into the VM with the newly
// allocated object (c_rarg0); all live registers saved/restored around it.
1031 case C1StubId::dtrace_object_alloc_id:
1032 { // c_rarg0: object
1033 StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
1034 save_live_registers(sasm);
1035
1036 __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);
1037
1038 restore_live_registers(sasm);
1039 }
1040 break;
1041
// Any stub id without a generator above: emit a stub that reports the
// id to the unimplemented_entry runtime routine and never returns.
1042 default:
1043 { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
1044 __ mov(r0, (int)id);
1045 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
1046 }
1047 break;
1048 }
1049 }
1050 return oop_maps;
1051 }
1052
1053 #undef __
1054
// Platform hook for naming runtime stub addresses in debug output; not
// implemented on AArch64. Unimplemented() aborts the VM, so the missing
// return value is presumably never reached — confirm the macro is fatal.
1055 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); }
|
666 break;
667
// Slow-path instance allocation (Valhalla variant). All three stub ids
// share one body and differ only in the debug name set via set_info().
// klass arrives in r3; the new oop is left in r0, which is deliberately
// NOT restored (restore_live_registers_except_r0).
668 case C1StubId::new_instance_id:
669 case C1StubId::fast_new_instance_id:
670 case C1StubId::fast_new_instance_init_check_id:
671 {
672 Register klass = r3; // Incoming
673 Register obj = r0; // Result
674
675 if (id == C1StubId::new_instance_id) {
676 __ set_info("new_instance", dont_gc_arguments);
677 } else if (id == C1StubId::fast_new_instance_id) {
678 __ set_info("fast new_instance", dont_gc_arguments);
679 } else {
680 assert(id == C1StubId::fast_new_instance_init_check_id, "bad C1StubId");
681 __ set_info("fast new_instance init check", dont_gc_arguments);
682 }
683
// Frame + saved registers + oop map at the runtime call site.
684 __ enter();
685 OopMap* map = save_live_registers(sasm);
686 int call_offset;
687 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
688 oop_maps = new OopMapSet();
689 oop_maps->add_gc_map(call_offset, map);
690 restore_live_registers_except_r0(sasm);
691 __ verify_oop(obj);
692 __ leave();
693 __ ret(lr);
694
695 // r0,: new instance
696 }
697
698 break;
699
// Invocation/backedge counter overflow: load bci and Method* from the
// caller frame slots (rfp + 2 and 3 words; layout must match the C1 call
// site), then call counter_overflow. No result; all registers restored.
700 case C1StubId::counter_overflow_id:
701 {
702 Register bci = r0, method = r1;
703 __ enter();
704 OopMap* map = save_live_registers(sasm);
705 // Retrieve bci
706 __ ldrw(bci, Address(rfp, 2*BytesPerWord));
707 // And a pointer to the Method*
708 __ ldr(method, Address(rfp, 3*BytesPerWord));
709 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
710 oop_maps = new OopMapSet();
711 oop_maps->add_gc_map(call_offset, map);
712 restore_live_registers(sasm);
713 __ leave();
714 __ ret(lr);
715 }
716 break;
717
// Slow-path array allocation (Valhalla variant, adds null-free/flat
// inline-type arrays). length in r19, klass in r3, new array oop in r0.
718 case C1StubId::new_type_array_id:
719 case C1StubId::new_object_array_id:
720 case C1StubId::new_null_free_array_id:
721 {
722 Register length = r19; // Incoming
723 Register klass = r3; // Incoming
724 Register obj = r0; // Result
725
726 if (id == C1StubId::new_type_array_id) {
727 __ set_info("new_type_array", dont_gc_arguments);
728 } else if (id == C1StubId::new_object_array_id) {
729 __ set_info("new_object_array", dont_gc_arguments);
730 } else {
731 __ set_info("new_null_free_array", dont_gc_arguments);
732 }
733
734 #ifdef ASSERT
735 // assert object type is really an array of the proper kind
736 {
737 Label ok;
// t0 aliases obj/r0 — safe to clobber; r0 is only defined later by
// the runtime call.
738 Register t0 = obj;
739 __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
740 __ asrw(t0, t0, Klass::_lh_array_tag_shift);
// Object-array and null-free requests both accept either the obj tag
// or the inline-type (vt) tag, since flattening decisions can go
// either way at runtime.
741 switch (id) {
742 case C1StubId::new_type_array_id:
743 __ cmpw(t0, Klass::_lh_array_tag_type_value);
744 __ br(Assembler::EQ, ok);
745 __ stop("assert(is a type array klass)");
746 break;
747 case C1StubId::new_object_array_id:
748 __ cmpw(t0, Klass::_lh_array_tag_obj_value); // new "[Ljava/lang/Object;"
749 __ br(Assembler::EQ, ok);
750 __ cmpw(t0, Klass::_lh_array_tag_vt_value); // new "[LVT;"
751 __ br(Assembler::EQ, ok);
752 __ stop("assert(is an object or inline type array klass)");
753 break;
754 case C1StubId::new_null_free_array_id:
755 __ cmpw(t0, Klass::_lh_array_tag_vt_value); // the array can be a flat array.
756 __ br(Assembler::EQ, ok);
757 __ cmpw(t0, Klass::_lh_array_tag_obj_value); // the array cannot be a flat array (due to InlineArrayElementMaxFlatSize, etc)
758 __ br(Assembler::EQ, ok);
759 __ stop("assert(is an object or inline type array klass)");
760 break;
761 default: ShouldNotReachHere();
762 }
763 __ should_not_reach_here();
764 __ bind(ok);
765 }
766 #endif // ASSERT
767
768 __ enter();
769 OopMap* map = save_live_registers(sasm);
770 int call_offset;
771 if (id == C1StubId::new_type_array_id) {
772 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
773 } else if (id == C1StubId::new_object_array_id) {
774 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
775 } else {
776 assert(id == C1StubId::new_null_free_array_id, "must be");
777 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_null_free_array), klass, length);
778 }
779
780 oop_maps = new OopMapSet();
781 oop_maps->add_gc_map(call_offset, map);
782 restore_live_registers_except_r0(sasm);
783
784 __ verify_oop(obj);
785 __ leave();
786 __ ret(lr);
787
788 // r0: new array
789 }
790 break;
791
// Multi-dimensional array allocation. klass/rank/dims are shuffled into
// the runtime-call argument registers r1..r3.
// NOTE: r2 is copied to c_rarg3 BEFORE c_rarg2 is written — on AArch64
// c_rarg2 presumably aliases r2 (AAPCS64), so this order avoids
// clobbering the dims pointer; confirm against the register definitions.
792 case C1StubId::new_multi_array_id:
793 { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
794 // r0,: klass
795 // r19,: rank
796 // r2: address of 1st dimension
797 OopMap* map = save_live_registers(sasm);
798 __ mov(c_rarg1, r0);
799 __ mov(c_rarg3, r2);
800 __ mov(c_rarg2, r19);
801 int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, new_multi_array), r1, r2, r3);
802
803 oop_maps = new OopMapSet();
804 oop_maps->add_gc_map(call_offset, map);
805 restore_live_registers_except_r0(sasm);
806
807 // r0,: new multi array
808 __ verify_oop(r0);
809 }
810 break;
811
// Buffer scalarized inline-type arguments into heap objects (with or
// without the receiver). Result is routed through lr, then moved to r20,
// because r0-r7 hold live caller arguments (see comment below).
812 case C1StubId::buffer_inline_args_id:
813 case C1StubId::buffer_inline_args_no_receiver_id:
814 {
815 const char* name = (id == C1StubId::buffer_inline_args_id) ?
816 "buffer_inline_args" : "buffer_inline_args_no_receiver";
817 StubFrame f(sasm, name, dont_gc_arguments);
818 OopMap* map = save_live_registers(sasm);
819 Register method = r19; // Incoming
820 address entry = (id == C1StubId::buffer_inline_args_id) ?
821 CAST_FROM_FN_PTR(address, buffer_inline_args) :
822 CAST_FROM_FN_PTR(address, buffer_inline_args_no_receiver);
823 // This is called from a C1 method's scalarized entry point
824 // where r0-r7 may be holding live argument values so we can't
825 // return the result in r0 as the other stubs do. LR is used as
826 // a temporary below to avoid the result being clobbered by
827 // restore_live_registers.
828 int call_offset = __ call_RT(lr, noreg, entry, method);
829 oop_maps = new OopMapSet();
830 oop_maps->add_gc_map(call_offset, map);
831 restore_live_registers(sasm);
832 __ mov(r20, lr);
833 __ verify_oop(r20); // r20: an array of buffered value objects
834 }
835 break;
836
// Load (buffer) an element from a flat inline-type array. Arguments come
// in via the C1 store_parameter convention (read back with
// f.load_argument), not the C ABI. The buffered element oop is returned
// in r0, which is not restored.
837 case C1StubId::load_flat_array_id:
838 {
839 StubFrame f(sasm, "load_flat_array", dont_gc_arguments);
840 OopMap* map = save_live_registers(sasm);
841
842 // Called with store_parameter and not C abi
843
844 f.load_argument(1, r0); // r0,: array
845 f.load_argument(0, r1); // r1,: index
846 int call_offset = __ call_RT(r0, noreg, CAST_FROM_FN_PTR(address, load_flat_array), r0, r1);
847
848 // Ensure the stores that initialize the buffer are visible
849 // before any subsequent store that publishes this reference.
850 __ membar(Assembler::StoreStore);
851
852 oop_maps = new OopMapSet();
853 oop_maps->add_gc_map(call_offset, map);
854 restore_live_registers_except_r0(sasm);
855
856 // r0: loaded element at array[index]
857 __ verify_oop(r0);
858 }
859 break;
860
// Store a value into a flat inline-type array. Arguments arrive via the
// store_parameter convention. No result register is named in call_RT,
// yet r0 is excluded from restoration — NOTE(review): looks intentional
// to mirror the load path, but confirm r0 need not be preserved here.
861 case C1StubId::store_flat_array_id:
862 {
863 StubFrame f(sasm, "store_flat_array", dont_gc_arguments);
864 OopMap* map = save_live_registers(sasm, 4);
865
866 // Called with store_parameter and not C abi
867
868 f.load_argument(2, r0); // r0: array
869 f.load_argument(1, r1); // r1: index
870 f.load_argument(0, r2); // r2: value
871 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, store_flat_array), r0, r1, r2);
872
873 oop_maps = new OopMapSet();
874 oop_maps->add_gc_map(call_offset, map);
875 restore_live_registers_except_r0(sasm);
876 }
877 break;
878
// Inline-type substitutability test (acmp semantics for value objects).
// Operands arrive via the store_parameter convention; the boolean result
// comes back in r0 (excluded from restoration).
879 case C1StubId::substitutability_check_id:
880 {
881 StubFrame f(sasm, "substitutability_check", dont_gc_arguments);
882 OopMap* map = save_live_registers(sasm);
883
884 // Called with store_parameter and not C abi
885
886 f.load_argument(1, r1); // r1,: left
887 f.load_argument(0, r2); // r2,: right
888 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, substitutability_check), r1, r2);
889
890 oop_maps = new OopMapSet();
891 oop_maps->add_gc_map(call_offset, map);
892 restore_live_registers_except_r0(sasm);
893
894 // r0,: are the two operands substitutable
895 }
896 break;
897
// Register a finalizable object with the runtime. Fast path: test the
// has_finalizer bit in the klass misc_flags and return immediately if it
// is clear; only finalizable objects take the runtime call.
898 case C1StubId::register_finalizer_id:
899 {
900 __ set_info("register_finalizer", dont_gc_arguments);
901
902 // This is called via call_runtime so the arguments
903 // will be placed in C abi locations
904
905 __ verify_oop(c_rarg0);
906
907 // load the klass and check the has finalizer flag
908 Label register_finalizer;
909 Register t = r5;
910 __ load_klass(t, r0);
911 __ ldrb(t, Address(t, Klass::misc_flags_offset()));
912 __ tbnz(t, exact_log2(KlassFlags::_misc_has_finalizer), register_finalizer);
913 __ ret(lr);
914
// Slow path: full frame + oop map around SharedRuntime::register_finalizer.
915 __ bind(register_finalizer);
916 __ enter();
917 OopMap* oop_map = save_live_registers(sasm);
918 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
919 oop_maps = new OopMapSet();
920 oop_maps->add_gc_map(call_offset, oop_map);
921
922 // Now restore all the live registers
923 restore_live_registers(sasm);
924
925 __ leave();
926 __ ret(lr);
927 }
928 break;
929
// Exception-throwing stubs. The boolean passed to generate_exception_throw
// indicates whether the exception constructor takes an argument (true for
// class-cast and identity exceptions) — confirm against shared Runtime1.
930 case C1StubId::throw_class_cast_exception_id:
931 { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments, does_not_return);
932 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
933 }
934 break;
935
936 case C1StubId::throw_incompatible_class_change_error_id:
937 { StubFrame f(sasm, "throw_incompatible_class_change_error", dont_gc_arguments, does_not_return);
938 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
939 }
940 break;
941
942 case C1StubId::throw_illegal_monitor_state_exception_id:
943 { StubFrame f(sasm, "throw_illegal_monitor_state_exception", dont_gc_arguments);
944 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_illegal_monitor_state_exception), false);
945 }
946 break;
947
948 case C1StubId::throw_identity_exception_id:
949 { StubFrame f(sasm, "throw_identity_exception", dont_gc_arguments);
950 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_identity_exception), true);
951 }
952 break;
953
954 case C1StubId::slow_subtype_check_id:
955 {
956 // Typical calling sequence:
957 // __ push(klass_RInfo); // object klass or other subclass
958 // __ push(sup_k_RInfo); // array element klass or other superclass
959 // __ bl(slow_subtype_check);
960 // Note that the subclass is pushed first, and is therefore deepest.
961 enum layout {
962 r0_off, r0_off_hi,
963 r2_off, r2_off_hi,
964 r4_off, r4_off_hi,
965 r5_off, r5_off_hi,
966 sup_k_off, sup_k_off_hi,
967 klass_off, klass_off_hi,
968 framesize,
969 result_off = sup_k_off
970 };
971
972 __ set_info("slow_subtype_check", dont_gc_arguments);
973 __ push(RegSet::of(r0, r2, r4, r5), sp);
1143 __ leave();
1144 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1145 assert(deopt_blob != nullptr, "deoptimization blob must have been created");
1146
1147 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1148 }
1149 break;
1150
// DTrace object-allocation probe: leaf call into the VM with the newly
// allocated object (c_rarg0); all live registers saved/restored around it.
1151 case C1StubId::dtrace_object_alloc_id:
1152 { // c_rarg0: object
1153 StubFrame f(sasm, "dtrace_object_alloc", dont_gc_arguments);
1154 save_live_registers(sasm);
1155
1156 __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), c_rarg0);
1157
1158 restore_live_registers(sasm);
1159 }
1160 break;
1161
// Any stub id without a generator above: emit a stub that reports the
// id to the unimplemented_entry runtime routine and never returns.
1162 default:
1163 // FIXME: For unhandled trap_id this code fails with assert during vm initialization
1164 // rather than insert a call to unimplemented_entry
1165 { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments, does_not_return);
1166 __ mov(r0, (int)id);
1167 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
1168 }
1169 break;
1170 }
1171 }
1172
1173
1174 return oop_maps;
1175 }
1176
1177 #undef __
1178
// Platform hook for naming runtime stub addresses in debug output; not
// implemented on AArch64. Unimplemented() aborts the VM, so the missing
// return value is presumably never reached — confirm the macro is fatal.
1179 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); }
|