896 __ sve_sunpklo(z10, __ H, z11); // sunpklo z10.h, z11.b
897 __ sve_scvtf(z1, __ D, p0, z0, __ S); // scvtf z1.d, p0/m, z0.s
898 __ sve_scvtf(z3, __ D, p1, z2, __ D); // scvtf z3.d, p1/m, z2.d
899 __ sve_scvtf(z6, __ S, p2, z1, __ D); // scvtf z6.s, p2/m, z1.d
900 __ sve_scvtf(z6, __ S, p3, z1, __ S); // scvtf z6.s, p3/m, z1.s
901 __ sve_scvtf(z6, __ H, p3, z1, __ S); // scvtf z6.h, p3/m, z1.s
902 __ sve_scvtf(z6, __ H, p3, z1, __ D); // scvtf z6.h, p3/m, z1.d
903 __ sve_scvtf(z6, __ H, p3, z1, __ H); // scvtf z6.h, p3/m, z1.h
904 __ sve_fcvt(z5, __ D, p3, z4, __ S); // fcvt z5.d, p3/m, z4.s
905 __ sve_fcvt(z1, __ S, p3, z0, __ D); // fcvt z1.s, p3/m, z0.d
906 __ sve_fcvtzs(z19, __ D, p2, z1, __ D); // fcvtzs z19.d, p2/m, z1.d
907 __ sve_fcvtzs(z9, __ S, p1, z8, __ S); // fcvtzs z9.s, p1/m, z8.s
908 __ sve_fcvtzs(z1, __ S, p2, z0, __ D); // fcvtzs z1.s, p2/m, z0.d
909 __ sve_fcvtzs(z1, __ D, p3, z0, __ S); // fcvtzs z1.d, p3/m, z0.s
910 __ sve_fcvtzs(z1, __ S, p4, z18, __ H); // fcvtzs z1.s, p4/m, z18.h
911 __ sve_lasta(r0, __ B, p0, z15); // lasta w0, p0, z15.b
912 __ sve_lastb(r1, __ B, p1, z16); // lastb w1, p1, z16.b
913 __ sve_lasta(v0, __ B, p0, z15); // lasta b0, p0, z15.b
914 __ sve_lastb(v1, __ B, p1, z16); // lastb b1, p1, z16.b
915 __ sve_index(z6, __ S, 1, 1); // index z6.s, #1, #1
916 __ sve_cpy(z7, __ H, p3, r5); // cpy z7.h, p3/m, w5
917 __ sve_tbl(z16, __ S, z17, z18); // tbl z16.s, {z17.s}, z18.s
918 __ sve_ld1w_gather(z15, p0, r5, z16); // ld1w {z15.s}, p0/z, [x5, z16.s, uxtw #2]
919 __ sve_ld1d_gather(z15, p0, r5, z16); // ld1d {z15.d}, p0/z, [x5, z16.d, uxtw #3]
920 __ sve_st1w_scatter(z15, p0, r5, z16); // st1w {z15.s}, p0, [x5, z16.s, uxtw #2]
921 __ sve_st1d_scatter(z15, p0, r5, z16); // st1d {z15.d}, p0, [x5, z16.d, uxtw #3]
922 __ sve_and(p0, p1, p2, p3); // and p0.b, p1/z, p2.b, p3.b
923 __ sve_ands(p4, p5, p6, p0); // ands p4.b, p5/z, p6.b, p0.b
924 __ sve_eor(p0, p1, p2, p3); // eor p0.b, p1/z, p2.b, p3.b
925 __ sve_eors(p5, p6, p0, p1); // eors p5.b, p6/z, p0.b, p1.b
926 __ sve_orr(p0, p1, p2, p3); // orr p0.b, p1/z, p2.b, p3.b
927 __ sve_orrs(p9, p1, p4, p5); // orrs p9.b, p1/z, p4.b, p5.b
928 __ sve_bic(p10, p7, p9, p11); // bic p10.b, p7/z, p9.b, p11.b
929 __ sve_ptest(p7, p1); // ptest p7, p1.b
930 __ sve_ptrue(p1, __ B); // ptrue p1.b
931 __ sve_ptrue(p1, __ B, 0b00001); // ptrue p1.b, vl1
932 __ sve_ptrue(p1, __ B, 0b00101); // ptrue p1.b, vl5
933 __ sve_ptrue(p1, __ B, 0b01001); // ptrue p1.b, vl16
934 __ sve_ptrue(p1, __ B, 0b01101); // ptrue p1.b, vl256
935 __ sve_ptrue(p2, __ H); // ptrue p2.h
938 __ sve_ptrue(p2, __ H, 0b01010); // ptrue p2.h, vl32
939 __ sve_ptrue(p3, __ S); // ptrue p3.s
940 __ sve_ptrue(p3, __ S, 0b00011); // ptrue p3.s, vl3
941 __ sve_ptrue(p3, __ S, 0b00111); // ptrue p3.s, vl7
942 __ sve_ptrue(p3, __ S, 0b01011); // ptrue p3.s, vl64
943 __ sve_ptrue(p4, __ D); // ptrue p4.d
944 __ sve_ptrue(p4, __ D, 0b00100); // ptrue p4.d, vl4
945 __ sve_ptrue(p4, __ D, 0b01000); // ptrue p4.d, vl8
946 __ sve_ptrue(p4, __ D, 0b01100); // ptrue p4.d, vl128
947 __ sve_pfalse(p7); // pfalse p7.b
948 __ sve_uzp1(p0, __ B, p0, p1); // uzp1 p0.b, p0.b, p1.b
949 __ sve_uzp1(p0, __ H, p0, p1); // uzp1 p0.h, p0.h, p1.h
950 __ sve_uzp1(p0, __ S, p0, p1); // uzp1 p0.s, p0.s, p1.s
951 __ sve_uzp1(p0, __ D, p0, p1); // uzp1 p0.d, p0.d, p1.d
952 __ sve_uzp2(p0, __ B, p0, p1); // uzp2 p0.b, p0.b, p1.b
953 __ sve_uzp2(p0, __ H, p0, p1); // uzp2 p0.h, p0.h, p1.h
954 __ sve_uzp2(p0, __ S, p0, p1); // uzp2 p0.s, p0.s, p1.s
955 __ sve_uzp2(p0, __ D, p0, p1); // uzp2 p0.d, p0.d, p1.d
956 __ sve_punpklo(p1, p0); // punpklo p1.h, p0.b
957 __ sve_punpkhi(p1, p0); // punpkhi p1.h, p0.b
958 __ sve_ext(z17, z16, 63); // ext z17.b, z17.b, z16.b, #63
959
960 // FloatImmediateOp
961 __ fmovd(v0, 2.0); // fmov d0, #2.0
962 __ fmovd(v0, 2.125); // fmov d0, #2.125
963 __ fmovd(v0, 4.0); // fmov d0, #4.0
964 __ fmovd(v0, 4.25); // fmov d0, #4.25
965 __ fmovd(v0, 8.0); // fmov d0, #8.0
966 __ fmovd(v0, 8.5); // fmov d0, #8.5
967 __ fmovd(v0, 16.0); // fmov d0, #16.0
968 __ fmovd(v0, 17.0); // fmov d0, #17.0
969 __ fmovd(v0, 0.125); // fmov d0, #0.125
970 __ fmovd(v0, 0.1328125); // fmov d0, #0.1328125
971 __ fmovd(v0, 0.25); // fmov d0, #0.25
972 __ fmovd(v0, 0.265625); // fmov d0, #0.265625
973 __ fmovd(v0, 0.5); // fmov d0, #0.5
974 __ fmovd(v0, 0.53125); // fmov d0, #0.53125
975 __ fmovd(v0, 1.0); // fmov d0, #1.0
976 __ fmovd(v0, 1.0625); // fmov d0, #1.0625
977 __ fmovd(v0, -2.0); // fmov d0, #-2.0
978 __ fmovd(v0, -2.125); // fmov d0, #-2.125
1127 __ sve_orr(z17, __ S, 917504u); // orr z17.s, z17.s, #0xe0000
1128
1129 // SVEBinaryImmOp
1130 __ sve_add(z19, __ S, 148u); // add z19.s, z19.s, #0x94
1131 __ sve_sub(z22, __ S, 244u); // sub z22.s, z22.s, #0xf4
1132 __ sve_and(z20, __ S, 491520u); // and z20.s, z20.s, #0x78000
1133 __ sve_eor(z17, __ D, 18302628885642084351u); // eor z17.d, z17.d, #0xfe000000007fffff
1134 __ sve_orr(z4, __ D, 18158513714670600195u); // orr z4.d, z4.d, #0xfc000003fc000003
1135
1136 // SVEVectorOp
1137 __ sve_add(z2, __ H, z8, z8); // add z2.h, z8.h, z8.h
1138 __ sve_sub(z24, __ S, z17, z30); // sub z24.s, z17.s, z30.s
1139 __ sve_fadd(z4, __ S, z30, z1); // fadd z4.s, z30.s, z1.s
1140 __ sve_fmul(z19, __ S, z12, z0); // fmul z19.s, z12.s, z0.s
1141 __ sve_fsub(z7, __ S, z24, z17); // fsub z7.s, z24.s, z17.s
1142 __ sve_abs(z27, __ D, p1, z9); // abs z27.d, p1/m, z9.d
1143 __ sve_add(z23, __ D, p3, z16); // add z23.d, p3/m, z23.d, z16.d
1144 __ sve_and(z22, __ D, p5, z20); // and z22.d, p5/m, z22.d, z20.d
1145 __ sve_asr(z28, __ S, p2, z13); // asr z28.s, p2/m, z28.s, z13.s
1146 __ sve_bic(z7, __ H, p5, z28); // bic z7.h, p5/m, z7.h, z28.h
1147 __ sve_cnt(z11, __ S, p3, z11); // cnt z11.s, p3/m, z11.s
1148 __ sve_eor(z1, __ S, p6, z8); // eor z1.s, p6/m, z1.s, z8.s
1149 __ sve_lsl(z13, __ S, p4, z17); // lsl z13.s, p4/m, z13.s, z17.s
1150 __ sve_lsr(z4, __ H, p0, z3); // lsr z4.h, p0/m, z4.h, z3.h
1151 __ sve_mul(z7, __ S, p3, z14); // mul z7.s, p3/m, z7.s, z14.s
1152 __ sve_neg(z4, __ B, p3, z29); // neg z4.b, p3/m, z29.b
1153 __ sve_not(z0, __ D, p2, z21); // not z0.d, p2/m, z21.d
1154 __ sve_orr(z3, __ S, p0, z9); // orr z3.s, p0/m, z3.s, z9.s
1155 __ sve_smax(z28, __ B, p2, z24); // smax z28.b, p2/m, z28.b, z24.b
1156 __ sve_smin(z19, __ D, p1, z23); // smin z19.d, p1/m, z19.d, z23.d
1157 __ sve_sub(z13, __ D, p5, z10); // sub z13.d, p5/m, z13.d, z10.d
1158 __ sve_fabs(z12, __ D, p4, z30); // fabs z12.d, p4/m, z30.d
1159 __ sve_fadd(z14, __ D, p0, z29); // fadd z14.d, p0/m, z14.d, z29.d
1160 __ sve_fdiv(z21, __ D, p5, z7); // fdiv z21.d, p5/m, z21.d, z7.d
1161 __ sve_fmax(z2, __ D, p0, z26); // fmax z2.d, p0/m, z2.d, z26.d
1162 __ sve_fmin(z9, __ D, p4, z17); // fmin z9.d, p4/m, z9.d, z17.d
1163 __ sve_fmul(z0, __ D, p1, z2); // fmul z0.d, p1/m, z0.d, z2.d
1164 __ sve_fneg(z14, __ D, p1, z11); // fneg z14.d, p1/m, z11.d
1165 __ sve_frintm(z14, __ S, p4, z29); // frintm z14.s, p4/m, z29.s
1166 __ sve_frintn(z3, __ S, p0, z22); // frintn z3.s, p0/m, z22.s
1167 __ sve_frintp(z3, __ S, p6, z27); // frintp z3.s, p6/m, z27.s
1168 __ sve_fsqrt(z19, __ D, p5, z7); // fsqrt z19.d, p5/m, z7.d
1169 __ sve_fsub(z21, __ S, p3, z5); // fsub z21.s, p3/m, z21.s, z5.s
1170 __ sve_fmad(z25, __ D, p1, z21, z17); // fmad z25.d, p1/m, z21.d, z17.d
1171 __ sve_fmla(z0, __ S, p0, z9, z19); // fmla z0.s, p0/m, z9.s, z19.s
1172 __ sve_fmls(z7, __ D, p3, z14, z17); // fmls z7.d, p3/m, z14.d, z17.d
1173 __ sve_fmsb(z11, __ D, p3, z24, z17); // fmsb z11.d, p3/m, z24.d, z17.d
1174 __ sve_fnmad(z17, __ D, p2, z15, z14); // fnmad z17.d, p2/m, z15.d, z14.d
1175 __ sve_fnmsb(z22, __ S, p7, z22, z7); // fnmsb z22.s, p7/m, z22.s, z7.s
1176 __ sve_fnmla(z5, __ S, p7, z27, z10); // fnmla z5.s, p7/m, z27.s, z10.s
1177 __ sve_fnmls(z14, __ S, p6, z21, z20); // fnmls z14.s, p6/m, z21.s, z20.s
1178 __ sve_mla(z3, __ D, p5, z25, z5); // mla z3.d, p5/m, z25.d, z5.d
1179 __ sve_mls(z29, __ H, p4, z17, z1); // mls z29.h, p4/m, z17.h, z1.h
1180 __ sve_and(z14, z29, z13); // and z14.d, z29.d, z13.d
1181 __ sve_eor(z17, z2, z30); // eor z17.d, z2.d, z30.d
1182 __ sve_orr(z22, z21, z29); // orr z22.d, z21.d, z29.d
1183 __ sve_bic(z8, z2, z0); // bic z8.d, z2.d, z0.d
1184 __ sve_uzp1(z23, __ S, z22, z0); // uzp1 z23.s, z22.s, z0.s
1185 __ sve_uzp2(z25, __ H, z26, z23); // uzp2 z25.h, z26.h, z23.h
1186
1187 // SVEReductionOp
1188 __ sve_andv(v21, __ B, p5, z1); // andv b21, p5, z1.b
1189 __ sve_orv(v10, __ S, p5, z11); // orv s10, p5, z11.s
1190 __ sve_eorv(v23, __ D, p6, z8); // eorv d23, p6, z8.d
1191 __ sve_smaxv(v17, __ S, p5, z19); // smaxv s17, p5, z19.s
1192 __ sve_sminv(v4, __ D, p5, z13); // sminv d4, p5, z13.d
1193 __ sve_fminv(v22, __ D, p7, z30); // fminv d22, p7, z30.d
1194 __ sve_fmaxv(v17, __ S, p4, z14); // fmaxv s17, p4, z14.s
1195 __ sve_fadda(v12, __ S, p7, z20); // fadda s12, p7, s12, z20.s
1196 __ sve_uaddv(v1, __ B, p3, z13); // uaddv d1, p3, z13.b
1197
1198 __ bind(forth);
1199
1200 /*
1201 */
1202
1203 static const unsigned int insns[] =
1204 {
1205 0x8b0d82fa, 0xcb49970c, 0xab889dfc, 0xeb9ee787,
1206 0x0b9b3ec9, 0x4b9179a3, 0x2b88474e, 0x6b8c56c0,
1207 0x8a1a51e0, 0xaa11f4ba, 0xca0281b8, 0xea918c7c,
1208 0x0a5d4a19, 0x2a4b262d, 0x4a513ca5, 0x6a9b6ae2,
1209 0x8a70b79b, 0xaaba9728, 0xca6dfe3d, 0xea627f1c,
1210 0x0aa70f53, 0x2aaa0f06, 0x4a6176a4, 0x6a604eb0,
1211 0x1105ed91, 0x3100583e, 0x5101f8bd, 0x710f0306,
1212 0x9101a1a0, 0xb10a5cc8, 0xd10810aa, 0xf10fd061,
1213 0x120cb166, 0x321764bc, 0x52174681, 0x720c0227,
1214 0x9241018e, 0xb25a2969, 0xd278b411, 0xf26aad01,
1215 0x14000000, 0x17ffffd7, 0x140003e4, 0x94000000,
1216 0x97ffffd4, 0x940003e1, 0x3400000a, 0x34fffa2a,
1217 0x34007bca, 0x35000008, 0x35fff9c8, 0x35007b68,
1218 0xb400000b, 0xb4fff96b, 0xb4007b0b, 0xb500001d,
1219 0xb5fff91d, 0xb5007abd, 0x10000013, 0x10fff8b3,
1220 0x10007a53, 0x90000013, 0x36300016, 0x3637f836,
1221 0x363079d6, 0x3758000c, 0x375ff7cc, 0x3758796c,
1222 0x128313a0, 0x528a32c7, 0x7289173b, 0x92ab3acc,
1223 0xd2a0bf94, 0xf2c285e8, 0x9358722f, 0x330e652f,
1224 0x53067f3b, 0x93577c53, 0xb34a1aac, 0xd35a4016,
1225 0x13946c63, 0x93c3dbc8, 0x54000000, 0x54fff5a0,
1226 0x54007740, 0x54000001, 0x54fff541, 0x540076e1,
1227 0x54000002, 0x54fff4e2, 0x54007682, 0x54000002,
1228 0x54fff482, 0x54007622, 0x54000003, 0x54fff423,
1229 0x540075c3, 0x54000003, 0x54fff3c3, 0x54007563,
1230 0x54000004, 0x54fff364, 0x54007504, 0x54000005,
1231 0x54fff305, 0x540074a5, 0x54000006, 0x54fff2a6,
1232 0x54007446, 0x54000007, 0x54fff247, 0x540073e7,
1233 0x54000008, 0x54fff1e8, 0x54007388, 0x54000009,
1234 0x54fff189, 0x54007329, 0x5400000a, 0x54fff12a,
1235 0x540072ca, 0x5400000b, 0x54fff0cb, 0x5400726b,
1236 0x5400000c, 0x54fff06c, 0x5400720c, 0x5400000d,
1237 0x54fff00d, 0x540071ad, 0x5400000e, 0x54ffefae,
1238 0x5400714e, 0x5400000f, 0x54ffef4f, 0x540070ef,
1239 0xd40658e1, 0xd4014d22, 0xd4046543, 0xd4273f60,
1240 0xd44cad80, 0xd503201f, 0xd503203f, 0xd503205f,
1241 0xd503209f, 0xd50320bf, 0xd503219f, 0xd50323bf,
1242 0xd503239f, 0xd50321df, 0xd50323ff, 0xd50323df,
1243 0xd503211f, 0xd503233f, 0xd503231f, 0xd503215f,
1244 0xd503237f, 0xd503235f, 0xd69f03e0, 0xd6bf03e0,
1245 0xd5033fdf, 0xd503207f, 0xd50320ff, 0xd5033e9f,
1246 0xd50332bf, 0xd61f0200, 0xd63f0280, 0xdac123ea,
1247 0xdac127fb, 0xdac12be8, 0xdac12fe0, 0xdac133e1,
1248 0xdac137f5, 0xdac13bf1, 0xdac13ffd, 0xdac147fd,
1249 0xd61f0b9f, 0xd61f0c3f, 0xd63f0aff, 0xd63f0ebf,
1250 0xdac143f4, 0xc8167e7b, 0xc80bfcd0, 0xc85f7c11,
1251 0xc85ffd44, 0xc89ffed8, 0xc8dffe6a, 0x88017fc5,
1252 0x8808fe2c, 0x885f7dc9, 0x885ffc27, 0x889ffe05,
1253 0x88dffd82, 0x480a7c6c, 0x481cff4e, 0x485f7d5e,
1254 0x485ffeae, 0x489ffd2d, 0x48dfff76, 0x081c7d73,
1255 0x081efc53, 0x085f7ee2, 0x085ffc01, 0x089ffe0c,
1256 0x08dffded, 0xc87f55b1, 0xc87ff90b, 0xc8382c2d,
1257 0xc83aedb5, 0x887f0d94, 0x887f87a6, 0x88262e04,
1258 0x8824b2be, 0xf8061366, 0xb802d151, 0x381e32da,
1383 0x25b8efe2, 0x25f8f007, 0x2538dfea, 0x25b8dfeb,
1384 0xa400a3e0, 0xa420a7e0, 0xa4484be0, 0xa467afe0,
1385 0xa4a8a7ea, 0xa547a814, 0xa4084ffe, 0xa55c53e0,
1386 0xa5e1540b, 0xe400fbf6, 0xe408ffff, 0xe420e7e0,
1387 0xe4484be0, 0xe460efe0, 0xe547e400, 0xe4014be0,
1388 0xe4a84fe0, 0xe5f15000, 0x858043e0, 0x85a043ff,
1389 0xe59f5d08, 0x0420e3e9, 0x0460e3ea, 0x04a0e3eb,
1390 0x04e0e3ec, 0x25104042, 0x25104871, 0x25904861,
1391 0x25904c92, 0x05344020, 0x05744041, 0x05b44062,
1392 0x05f44083, 0x252c8840, 0x253c1420, 0x25681572,
1393 0x25a21ce3, 0x25ea1e34, 0x0522c020, 0x05e6c0a4,
1394 0x2401a001, 0x2443a051, 0x24858881, 0x24c78cd1,
1395 0x24850891, 0x24c70cc1, 0x250f9001, 0x25508051,
1396 0x25802491, 0x25df28c1, 0x25850c81, 0x251e10d1,
1397 0x65816001, 0x65c36051, 0x65854891, 0x65c74cc1,
1398 0x05733820, 0x05b238a4, 0x05f138e6, 0x0570396a,
1399 0x65d0a001, 0x65d6a443, 0x65d4a826, 0x6594ac26,
1400 0x6554ac26, 0x6556ac26, 0x6552ac26, 0x65cbac85,
1401 0x65caac01, 0x65dea833, 0x659ca509, 0x65d8a801,
1402 0x65dcac01, 0x655cb241, 0x0520a1e0, 0x0521a601,
1403 0x052281e0, 0x05238601, 0x04a14026, 0x0568aca7,
1404 0x05b23230, 0x853040af, 0xc5b040af, 0xe57080af,
1405 0xe5b080af, 0x25034440, 0x254054c4, 0x25034640,
1406 0x25415a05, 0x25834440, 0x25c54489, 0x250b5d3a,
1407 0x2550dc20, 0x2518e3e1, 0x2518e021, 0x2518e0a1,
1408 0x2518e121, 0x2518e1a1, 0x2558e3e2, 0x2558e042,
1409 0x2558e0c2, 0x2558e142, 0x2598e3e3, 0x2598e063,
1410 0x2598e0e3, 0x2598e163, 0x25d8e3e4, 0x25d8e084,
1411 0x25d8e104, 0x25d8e184, 0x2518e407, 0x05214800,
1412 0x05614800, 0x05a14800, 0x05e14800, 0x05214c00,
1413 0x05614c00, 0x05a14c00, 0x05e14c00, 0x05304001,
1414 0x05314001, 0x05271e11, 0x1e601000, 0x1e603000,
1415 0x1e621000, 0x1e623000, 0x1e641000, 0x1e643000,
1416 0x1e661000, 0x1e663000, 0x1e681000, 0x1e683000,
1417 0x1e6a1000, 0x1e6a3000, 0x1e6c1000, 0x1e6c3000,
1418 0x1e6e1000, 0x1e6e3000, 0x1e701000, 0x1e703000,
1419 0x1e721000, 0x1e723000, 0x1e741000, 0x1e743000,
1420 0x1e761000, 0x1e763000, 0x1e781000, 0x1e783000,
1421 0x1e7a1000, 0x1e7a3000, 0x1e7c1000, 0x1e7c3000,
1422 0x1e7e1000, 0x1e7e3000, 0xf82c815f, 0xf8300047,
1423 0xf823126d, 0xf8312070, 0xf82133cb, 0xf82551e8,
1424 0xf83d401e, 0xf8347287, 0xf83762bc, 0xf8bb80b9,
1425 0xf8a10217, 0xf8bf1185, 0xf8a921fc, 0xf8bd33f6,
1426 0xf8b350bf, 0xf8ae43f0, 0xf8b0729b, 0xf8b0616c,
1427 0xf8e983c6, 0xf8f1039b, 0xf8fe1147, 0xf8f4208a,
1428 0xf8f83231, 0xf8f653a3, 0xf8ef4276, 0xf8f37056,
1429 0xf8ef6186, 0xf87081ab, 0xf87703c1, 0xf8731225,
1430 0xf86222d0, 0xf86d32aa, 0xf87d519b, 0xf87b4023,
1431 0xf87f7278, 0xf8716389, 0xb83b80ef, 0xb83503f7,
1432 0xb83913e2, 0xb83b2150, 0xb8373073, 0xb8305320,
1433 0xb83a4057, 0xb830708c, 0xb83c63be, 0xb8b080db,
1434 0xb8a901fd, 0xb8a710e4, 0xb8af22e9, 0xb8a83382,
1435 0xb8b550bf, 0xb8bb4220, 0xb8af7344, 0xb8a862dc,
1436 0xb8fb833b, 0xb8f70080, 0xb8e61010, 0xb8e4202f,
1437 0xb8ea30a7, 0xb8ea50fc, 0xb8f442b7, 0xb8e6710b,
1438 0xb8f160df, 0xb8718182, 0xb87e007d, 0xb87b13b6,
1439 0xb86e238d, 0xb87130b8, 0xb862514e, 0xb870436b,
1440 0xb877708c, 0xb8766091, 0xce304661, 0xce0c09cc,
1441 0xce748c70, 0xce863cb7, 0xce7b8191, 0xce668610,
1442 0xcec08382, 0xce668883, 0x25a0cdd1, 0x25a1c86c,
1443 0x058000b8, 0x054242ca, 0x0500051e, 0x2520cf00,
1444 0x25e1c951, 0x058039ea, 0x05400e1b, 0x05009891,
1445 0x2520c09c, 0x25a1d448, 0x05801e36, 0x05400516,
1446 0x050039fe, 0x2520ce0b, 0x25a1d0c8, 0x058074d9,
1447 0x05404531, 0x05031e84, 0x2560cf1a, 0x2561dda2,
1448 0x058026a3, 0x05404c35, 0x05007851, 0x25a0d293,
1449 0x25a1de96, 0x05808874, 0x05423bb1, 0x050030e4,
1450 0x04680102, 0x04be0638, 0x658103c4, 0x65800993,
1451 0x65910707, 0x04d6a53b, 0x04c00e17, 0x04da1696,
1452 0x049089bc, 0x045b1787, 0x049aad6b, 0x04991901,
1453 0x0493922d, 0x04518064, 0x04900dc7, 0x0417afa4,
1454 0x04deaaa0, 0x04980123, 0x04080b1c, 0x04ca06f3,
1455 0x04c1154d, 0x04dcb3cc, 0x65c083ae, 0x65cd94f5,
1456 0x65c68342, 0x65c79229, 0x65c28440, 0x04dda56e,
1457 0x6582b3ae, 0x6580a2c3, 0x6581bb63, 0x65cdb4f3,
1458 0x65818cb5, 0x65f186b9, 0x65b30120, 0x65f12dc7,
1459 0x65f1af0b, 0x65eec9f1, 0x65a7fed6, 0x65aa5f65,
1460 0x65b47aae, 0x04c55723, 0x0441723d, 0x042d33ae,
1461 0x04be3051, 0x047d32b6, 0x04e03048, 0x05a06ad7,
1462 0x05776f59, 0x041a3435, 0x0498356a, 0x04d93917,
1463 0x04883671, 0x04ca35a4, 0x65c73fd6, 0x658631d1,
1464 0x65983e8c, 0x04012da1,
1465 };
1466 // END Generated code -- do not edit
896 __ sve_sunpklo(z10, __ H, z11); // sunpklo z10.h, z11.b
897 __ sve_scvtf(z1, __ D, p0, z0, __ S); // scvtf z1.d, p0/m, z0.s
898 __ sve_scvtf(z3, __ D, p1, z2, __ D); // scvtf z3.d, p1/m, z2.d
899 __ sve_scvtf(z6, __ S, p2, z1, __ D); // scvtf z6.s, p2/m, z1.d
900 __ sve_scvtf(z6, __ S, p3, z1, __ S); // scvtf z6.s, p3/m, z1.s
901 __ sve_scvtf(z6, __ H, p3, z1, __ S); // scvtf z6.h, p3/m, z1.s
902 __ sve_scvtf(z6, __ H, p3, z1, __ D); // scvtf z6.h, p3/m, z1.d
903 __ sve_scvtf(z6, __ H, p3, z1, __ H); // scvtf z6.h, p3/m, z1.h
904 __ sve_fcvt(z5, __ D, p3, z4, __ S); // fcvt z5.d, p3/m, z4.s
905 __ sve_fcvt(z1, __ S, p3, z0, __ D); // fcvt z1.s, p3/m, z0.d
906 __ sve_fcvtzs(z19, __ D, p2, z1, __ D); // fcvtzs z19.d, p2/m, z1.d
907 __ sve_fcvtzs(z9, __ S, p1, z8, __ S); // fcvtzs z9.s, p1/m, z8.s
908 __ sve_fcvtzs(z1, __ S, p2, z0, __ D); // fcvtzs z1.s, p2/m, z0.d
909 __ sve_fcvtzs(z1, __ D, p3, z0, __ S); // fcvtzs z1.d, p3/m, z0.s
910 __ sve_fcvtzs(z1, __ S, p4, z18, __ H); // fcvtzs z1.s, p4/m, z18.h
911 __ sve_lasta(r0, __ B, p0, z15); // lasta w0, p0, z15.b
912 __ sve_lastb(r1, __ B, p1, z16); // lastb w1, p1, z16.b
913 __ sve_lasta(v0, __ B, p0, z15); // lasta b0, p0, z15.b
914 __ sve_lastb(v1, __ B, p1, z16); // lastb b1, p1, z16.b
915 __ sve_index(z6, __ S, 1, 1); // index z6.s, #1, #1
916 __ sve_index(z6, __ B, r5, 2); // index z6.b, w5, #2
917 __ sve_index(z6, __ H, r5, 3); // index z6.h, w5, #3
918 __ sve_index(z6, __ S, r5, 4); // index z6.s, w5, #4
919 __ sve_index(z7, __ D, r5, 5); // index z7.d, x5, #5
920 __ sve_cpy(z7, __ H, p3, r5); // cpy z7.h, p3/m, w5
921 __ sve_tbl(z16, __ S, z17, z18); // tbl z16.s, {z17.s}, z18.s
922 __ sve_ld1w_gather(z15, p0, r5, z16); // ld1w {z15.s}, p0/z, [x5, z16.s, uxtw #2]
923 __ sve_ld1d_gather(z15, p0, r5, z16); // ld1d {z15.d}, p0/z, [x5, z16.d, uxtw #3]
924 __ sve_st1w_scatter(z15, p0, r5, z16); // st1w {z15.s}, p0, [x5, z16.s, uxtw #2]
925 __ sve_st1d_scatter(z15, p0, r5, z16); // st1d {z15.d}, p0, [x5, z16.d, uxtw #3]
926 __ sve_and(p0, p1, p2, p3); // and p0.b, p1/z, p2.b, p3.b
927 __ sve_ands(p4, p5, p6, p0); // ands p4.b, p5/z, p6.b, p0.b
928 __ sve_eor(p0, p1, p2, p3); // eor p0.b, p1/z, p2.b, p3.b
929 __ sve_eors(p5, p6, p0, p1); // eors p5.b, p6/z, p0.b, p1.b
930 __ sve_orr(p0, p1, p2, p3); // orr p0.b, p1/z, p2.b, p3.b
931 __ sve_orrs(p9, p1, p4, p5); // orrs p9.b, p1/z, p4.b, p5.b
932 __ sve_bic(p10, p7, p9, p11); // bic p10.b, p7/z, p9.b, p11.b
933 __ sve_ptest(p7, p1); // ptest p7, p1.b
934 __ sve_ptrue(p1, __ B); // ptrue p1.b
935 __ sve_ptrue(p1, __ B, 0b00001); // ptrue p1.b, vl1
936 __ sve_ptrue(p1, __ B, 0b00101); // ptrue p1.b, vl5
937 __ sve_ptrue(p1, __ B, 0b01001); // ptrue p1.b, vl16
938 __ sve_ptrue(p1, __ B, 0b01101); // ptrue p1.b, vl256
939 __ sve_ptrue(p2, __ H); // ptrue p2.h
942 __ sve_ptrue(p2, __ H, 0b01010); // ptrue p2.h, vl32
943 __ sve_ptrue(p3, __ S); // ptrue p3.s
944 __ sve_ptrue(p3, __ S, 0b00011); // ptrue p3.s, vl3
945 __ sve_ptrue(p3, __ S, 0b00111); // ptrue p3.s, vl7
946 __ sve_ptrue(p3, __ S, 0b01011); // ptrue p3.s, vl64
947 __ sve_ptrue(p4, __ D); // ptrue p4.d
948 __ sve_ptrue(p4, __ D, 0b00100); // ptrue p4.d, vl4
949 __ sve_ptrue(p4, __ D, 0b01000); // ptrue p4.d, vl8
950 __ sve_ptrue(p4, __ D, 0b01100); // ptrue p4.d, vl128
951 __ sve_pfalse(p7); // pfalse p7.b
952 __ sve_uzp1(p0, __ B, p0, p1); // uzp1 p0.b, p0.b, p1.b
953 __ sve_uzp1(p0, __ H, p0, p1); // uzp1 p0.h, p0.h, p1.h
954 __ sve_uzp1(p0, __ S, p0, p1); // uzp1 p0.s, p0.s, p1.s
955 __ sve_uzp1(p0, __ D, p0, p1); // uzp1 p0.d, p0.d, p1.d
956 __ sve_uzp2(p0, __ B, p0, p1); // uzp2 p0.b, p0.b, p1.b
957 __ sve_uzp2(p0, __ H, p0, p1); // uzp2 p0.h, p0.h, p1.h
958 __ sve_uzp2(p0, __ S, p0, p1); // uzp2 p0.s, p0.s, p1.s
959 __ sve_uzp2(p0, __ D, p0, p1); // uzp2 p0.d, p0.d, p1.d
960 __ sve_punpklo(p1, p0); // punpklo p1.h, p0.b
961 __ sve_punpkhi(p1, p0); // punpkhi p1.h, p0.b
962 __ sve_compact(z16, __ S, z16, p1); // compact z16.s, p1, z16.s
963 __ sve_compact(z16, __ D, z16, p1); // compact z16.d, p1, z16.d
964 __ sve_ext(z17, z16, 63); // ext z17.b, z17.b, z16.b, #63
965 __ sve_histcnt(z16, __ S, p0, z16, z16); // histcnt z16.s, p0/z, z16.s, z16.s
966 __ sve_histcnt(z17, __ D, p0, z17, z17); // histcnt z17.d, p0/z, z17.d, z17.d
967
968 // FloatImmediateOp
969 __ fmovd(v0, 2.0); // fmov d0, #2.0
970 __ fmovd(v0, 2.125); // fmov d0, #2.125
971 __ fmovd(v0, 4.0); // fmov d0, #4.0
972 __ fmovd(v0, 4.25); // fmov d0, #4.25
973 __ fmovd(v0, 8.0); // fmov d0, #8.0
974 __ fmovd(v0, 8.5); // fmov d0, #8.5
975 __ fmovd(v0, 16.0); // fmov d0, #16.0
976 __ fmovd(v0, 17.0); // fmov d0, #17.0
977 __ fmovd(v0, 0.125); // fmov d0, #0.125
978 __ fmovd(v0, 0.1328125); // fmov d0, #0.1328125
979 __ fmovd(v0, 0.25); // fmov d0, #0.25
980 __ fmovd(v0, 0.265625); // fmov d0, #0.265625
981 __ fmovd(v0, 0.5); // fmov d0, #0.5
982 __ fmovd(v0, 0.53125); // fmov d0, #0.53125
983 __ fmovd(v0, 1.0); // fmov d0, #1.0
984 __ fmovd(v0, 1.0625); // fmov d0, #1.0625
985 __ fmovd(v0, -2.0); // fmov d0, #-2.0
986 __ fmovd(v0, -2.125); // fmov d0, #-2.125
1135 __ sve_orr(z17, __ S, 917504u); // orr z17.s, z17.s, #0xe0000
1136
1137 // SVEBinaryImmOp
1138 __ sve_add(z19, __ S, 148u); // add z19.s, z19.s, #0x94
1139 __ sve_sub(z22, __ S, 244u); // sub z22.s, z22.s, #0xf4
1140 __ sve_and(z20, __ S, 491520u); // and z20.s, z20.s, #0x78000
1141 __ sve_eor(z17, __ D, 18302628885642084351u); // eor z17.d, z17.d, #0xfe000000007fffff
1142 __ sve_orr(z4, __ D, 18158513714670600195u); // orr z4.d, z4.d, #0xfc000003fc000003
1143
1144 // SVEVectorOp
1145 __ sve_add(z2, __ H, z8, z8); // add z2.h, z8.h, z8.h
1146 __ sve_sub(z24, __ S, z17, z30); // sub z24.s, z17.s, z30.s
1147 __ sve_fadd(z4, __ S, z30, z1); // fadd z4.s, z30.s, z1.s
1148 __ sve_fmul(z19, __ S, z12, z0); // fmul z19.s, z12.s, z0.s
1149 __ sve_fsub(z7, __ S, z24, z17); // fsub z7.s, z24.s, z17.s
1150 __ sve_abs(z27, __ D, p1, z9); // abs z27.d, p1/m, z9.d
1151 __ sve_add(z23, __ D, p3, z16); // add z23.d, p3/m, z23.d, z16.d
1152 __ sve_and(z22, __ D, p5, z20); // and z22.d, p5/m, z22.d, z20.d
1153 __ sve_asr(z28, __ S, p2, z13); // asr z28.s, p2/m, z28.s, z13.s
1154 __ sve_bic(z7, __ H, p5, z28); // bic z7.h, p5/m, z7.h, z28.h
1155 __ sve_clz(z11, __ S, p3, z11); // clz z11.s, p3/m, z11.s
1156 __ sve_cnt(z1, __ S, p6, z8); // cnt z1.s, p6/m, z8.s
1157 __ sve_eor(z13, __ S, p4, z17); // eor z13.s, p4/m, z13.s, z17.s
1158 __ sve_lsl(z4, __ H, p0, z3); // lsl z4.h, p0/m, z4.h, z3.h
1159 __ sve_lsr(z7, __ S, p3, z14); // lsr z7.s, p3/m, z7.s, z14.s
1160 __ sve_mul(z4, __ B, p3, z29); // mul z4.b, p3/m, z4.b, z29.b
1161 __ sve_neg(z0, __ D, p2, z21); // neg z0.d, p2/m, z21.d
1162 __ sve_not(z3, __ S, p0, z9); // not z3.s, p0/m, z9.s
1163 __ sve_orr(z28, __ B, p2, z24); // orr z28.b, p2/m, z28.b, z24.b
1164 __ sve_rbit(z19, __ D, p1, z23); // rbit z19.d, p1/m, z23.d
1165 __ sve_revb(z13, __ D, p5, z10); // revb z13.d, p5/m, z10.d
1166 __ sve_smax(z12, __ S, p4, z30); // smax z12.s, p4/m, z12.s, z30.s
1167 __ sve_smin(z14, __ S, p0, z29); // smin z14.s, p0/m, z14.s, z29.s
1168 __ sve_sub(z21, __ S, p5, z7); // sub z21.s, p5/m, z21.s, z7.s
1169 __ sve_fabs(z2, __ D, p0, z26); // fabs z2.d, p0/m, z26.d
1170 __ sve_fadd(z9, __ D, p4, z17); // fadd z9.d, p4/m, z9.d, z17.d
1171 __ sve_fdiv(z0, __ D, p1, z2); // fdiv z0.d, p1/m, z0.d, z2.d
1172 __ sve_fmax(z14, __ D, p1, z11); // fmax z14.d, p1/m, z14.d, z11.d
1173 __ sve_fmin(z14, __ S, p4, z29); // fmin z14.s, p4/m, z14.s, z29.s
1174 __ sve_fmul(z3, __ S, p0, z22); // fmul z3.s, p0/m, z3.s, z22.s
1175 __ sve_fneg(z3, __ S, p6, z27); // fneg z3.s, p6/m, z27.s
1176 __ sve_frintm(z19, __ D, p5, z7); // frintm z19.d, p5/m, z7.d
1177 __ sve_frintn(z21, __ S, p3, z5); // frintn z21.s, p3/m, z5.s
1178 __ sve_frintp(z25, __ D, p1, z21); // frintp z25.d, p1/m, z21.d
1179 __ sve_fsqrt(z17, __ S, p0, z3); // fsqrt z17.s, p0/m, z3.s
1180 __ sve_fsub(z19, __ S, p3, z7); // fsub z19.s, p3/m, z19.s, z7.s
1181 __ sve_fmad(z14, __ S, p4, z17, z11); // fmad z14.s, p4/m, z17.s, z11.s
1182 __ sve_fmla(z24, __ S, p4, z30, z17); // fmla z24.s, p4/m, z30.s, z17.s
1183 __ sve_fmls(z15, __ D, p3, z26, z22); // fmls z15.d, p3/m, z26.d, z22.d
1184 __ sve_fmsb(z22, __ D, p2, z8, z5); // fmsb z22.d, p2/m, z8.d, z5.d
1185 __ sve_fnmad(z27, __ D, p2, z0, z14); // fnmad z27.d, p2/m, z0.d, z14.d
1186 __ sve_fnmsb(z21, __ D, p5, z0, z3); // fnmsb z21.d, p5/m, z0.d, z3.d
1187 __ sve_fnmla(z25, __ D, p1, z25, z29); // fnmla z25.d, p1/m, z25.d, z29.d
1188 __ sve_fnmls(z17, __ D, p0, z12, z14); // fnmls z17.d, p0/m, z12.d, z14.d
1189 __ sve_mla(z13, __ D, p0, z17, z2); // mla z13.d, p0/m, z17.d, z2.d
1190 __ sve_mls(z20, __ H, p5, z21, z29); // mls z20.h, p5/m, z21.h, z29.h
1191 __ sve_and(z8, z2, z0); // and z8.d, z2.d, z0.d
1192 __ sve_eor(z23, z22, z0); // eor z23.d, z22.d, z0.d
1193 __ sve_orr(z25, z26, z23); // orr z25.d, z26.d, z23.d
1194 __ sve_bic(z21, z21, z1); // bic z21.d, z21.d, z1.d
1195 __ sve_uzp1(z10, __ S, z19, z11); // uzp1 z10.s, z19.s, z11.s
1196 __ sve_uzp2(z23, __ D, z23, z8); // uzp2 z23.d, z23.d, z8.d
1197
1198 // SVEReductionOp
1199 __ sve_andv(v17, __ S, p5, z19); // andv s17, p5, z19.s
1200 __ sve_orv(v4, __ D, p5, z13); // orv d4, p5, z13.d
1201 __ sve_eorv(v22, __ D, p7, z30); // eorv d22, p7, z30.d
1202 __ sve_smaxv(v17, __ H, p4, z14); // smaxv h17, p4, z14.h
1203 __ sve_sminv(v12, __ B, p7, z20); // sminv b12, p7, z20.b
1204 __ sve_fminv(v1, __ S, p3, z13); // fminv s1, p3, z13.s
1205 __ sve_fmaxv(v7, __ D, p2, z11); // fmaxv d7, p2, z11.d
1206 __ sve_fadda(v4, __ S, p6, z15); // fadda s4, p6, s4, z15.s
1207 __ sve_uaddv(v3, __ S, p7, z0); // uaddv d3, p7, z0.s
1208
1209 __ bind(forth);
1210
1211 /*
1212 */
1213
1214 static const unsigned int insns[] =
1215 {
1216 0x8b0d82fa, 0xcb49970c, 0xab889dfc, 0xeb9ee787,
1217 0x0b9b3ec9, 0x4b9179a3, 0x2b88474e, 0x6b8c56c0,
1218 0x8a1a51e0, 0xaa11f4ba, 0xca0281b8, 0xea918c7c,
1219 0x0a5d4a19, 0x2a4b262d, 0x4a513ca5, 0x6a9b6ae2,
1220 0x8a70b79b, 0xaaba9728, 0xca6dfe3d, 0xea627f1c,
1221 0x0aa70f53, 0x2aaa0f06, 0x4a6176a4, 0x6a604eb0,
1222 0x1105ed91, 0x3100583e, 0x5101f8bd, 0x710f0306,
1223 0x9101a1a0, 0xb10a5cc8, 0xd10810aa, 0xf10fd061,
1224 0x120cb166, 0x321764bc, 0x52174681, 0x720c0227,
1225 0x9241018e, 0xb25a2969, 0xd278b411, 0xf26aad01,
1226 0x14000000, 0x17ffffd7, 0x140003ef, 0x94000000,
1227 0x97ffffd4, 0x940003ec, 0x3400000a, 0x34fffa2a,
1228 0x34007d2a, 0x35000008, 0x35fff9c8, 0x35007cc8,
1229 0xb400000b, 0xb4fff96b, 0xb4007c6b, 0xb500001d,
1230 0xb5fff91d, 0xb5007c1d, 0x10000013, 0x10fff8b3,
1231 0x10007bb3, 0x90000013, 0x36300016, 0x3637f836,
1232 0x36307b36, 0x3758000c, 0x375ff7cc, 0x37587acc,
1233 0x128313a0, 0x528a32c7, 0x7289173b, 0x92ab3acc,
1234 0xd2a0bf94, 0xf2c285e8, 0x9358722f, 0x330e652f,
1235 0x53067f3b, 0x93577c53, 0xb34a1aac, 0xd35a4016,
1236 0x13946c63, 0x93c3dbc8, 0x54000000, 0x54fff5a0,
1237 0x540078a0, 0x54000001, 0x54fff541, 0x54007841,
1238 0x54000002, 0x54fff4e2, 0x540077e2, 0x54000002,
1239 0x54fff482, 0x54007782, 0x54000003, 0x54fff423,
1240 0x54007723, 0x54000003, 0x54fff3c3, 0x540076c3,
1241 0x54000004, 0x54fff364, 0x54007664, 0x54000005,
1242 0x54fff305, 0x54007605, 0x54000006, 0x54fff2a6,
1243 0x540075a6, 0x54000007, 0x54fff247, 0x54007547,
1244 0x54000008, 0x54fff1e8, 0x540074e8, 0x54000009,
1245 0x54fff189, 0x54007489, 0x5400000a, 0x54fff12a,
1246 0x5400742a, 0x5400000b, 0x54fff0cb, 0x540073cb,
1247 0x5400000c, 0x54fff06c, 0x5400736c, 0x5400000d,
1248 0x54fff00d, 0x5400730d, 0x5400000e, 0x54ffefae,
1249 0x540072ae, 0x5400000f, 0x54ffef4f, 0x5400724f,
1250 0xd40658e1, 0xd4014d22, 0xd4046543, 0xd4273f60,
1251 0xd44cad80, 0xd503201f, 0xd503203f, 0xd503205f,
1252 0xd503209f, 0xd50320bf, 0xd503219f, 0xd50323bf,
1253 0xd503239f, 0xd50321df, 0xd50323ff, 0xd50323df,
1254 0xd503211f, 0xd503233f, 0xd503231f, 0xd503215f,
1255 0xd503237f, 0xd503235f, 0xd69f03e0, 0xd6bf03e0,
1256 0xd5033fdf, 0xd503207f, 0xd50320ff, 0xd5033e9f,
1257 0xd50332bf, 0xd61f0200, 0xd63f0280, 0xdac123ea,
1258 0xdac127fb, 0xdac12be8, 0xdac12fe0, 0xdac133e1,
1259 0xdac137f5, 0xdac13bf1, 0xdac13ffd, 0xdac147fd,
1260 0xd61f0b9f, 0xd61f0c3f, 0xd63f0aff, 0xd63f0ebf,
1261 0xdac143f4, 0xc8167e7b, 0xc80bfcd0, 0xc85f7c11,
1262 0xc85ffd44, 0xc89ffed8, 0xc8dffe6a, 0x88017fc5,
1263 0x8808fe2c, 0x885f7dc9, 0x885ffc27, 0x889ffe05,
1264 0x88dffd82, 0x480a7c6c, 0x481cff4e, 0x485f7d5e,
1265 0x485ffeae, 0x489ffd2d, 0x48dfff76, 0x081c7d73,
1266 0x081efc53, 0x085f7ee2, 0x085ffc01, 0x089ffe0c,
1267 0x08dffded, 0xc87f55b1, 0xc87ff90b, 0xc8382c2d,
1268 0xc83aedb5, 0x887f0d94, 0x887f87a6, 0x88262e04,
1269 0x8824b2be, 0xf8061366, 0xb802d151, 0x381e32da,
1394 0x25b8efe2, 0x25f8f007, 0x2538dfea, 0x25b8dfeb,
1395 0xa400a3e0, 0xa420a7e0, 0xa4484be0, 0xa467afe0,
1396 0xa4a8a7ea, 0xa547a814, 0xa4084ffe, 0xa55c53e0,
1397 0xa5e1540b, 0xe400fbf6, 0xe408ffff, 0xe420e7e0,
1398 0xe4484be0, 0xe460efe0, 0xe547e400, 0xe4014be0,
1399 0xe4a84fe0, 0xe5f15000, 0x858043e0, 0x85a043ff,
1400 0xe59f5d08, 0x0420e3e9, 0x0460e3ea, 0x04a0e3eb,
1401 0x04e0e3ec, 0x25104042, 0x25104871, 0x25904861,
1402 0x25904c92, 0x05344020, 0x05744041, 0x05b44062,
1403 0x05f44083, 0x252c8840, 0x253c1420, 0x25681572,
1404 0x25a21ce3, 0x25ea1e34, 0x0522c020, 0x05e6c0a4,
1405 0x2401a001, 0x2443a051, 0x24858881, 0x24c78cd1,
1406 0x24850891, 0x24c70cc1, 0x250f9001, 0x25508051,
1407 0x25802491, 0x25df28c1, 0x25850c81, 0x251e10d1,
1408 0x65816001, 0x65c36051, 0x65854891, 0x65c74cc1,
1409 0x05733820, 0x05b238a4, 0x05f138e6, 0x0570396a,
1410 0x65d0a001, 0x65d6a443, 0x65d4a826, 0x6594ac26,
1411 0x6554ac26, 0x6556ac26, 0x6552ac26, 0x65cbac85,
1412 0x65caac01, 0x65dea833, 0x659ca509, 0x65d8a801,
1413 0x65dcac01, 0x655cb241, 0x0520a1e0, 0x0521a601,
1414 0x052281e0, 0x05238601, 0x04a14026, 0x042244a6,
1415 0x046344a6, 0x04a444a6, 0x04e544a7, 0x0568aca7,
1416 0x05b23230, 0x853040af, 0xc5b040af, 0xe57080af,
1417 0xe5b080af, 0x25034440, 0x254054c4, 0x25034640,
1418 0x25415a05, 0x25834440, 0x25c54489, 0x250b5d3a,
1419 0x2550dc20, 0x2518e3e1, 0x2518e021, 0x2518e0a1,
1420 0x2518e121, 0x2518e1a1, 0x2558e3e2, 0x2558e042,
1421 0x2558e0c2, 0x2558e142, 0x2598e3e3, 0x2598e063,
1422 0x2598e0e3, 0x2598e163, 0x25d8e3e4, 0x25d8e084,
1423 0x25d8e104, 0x25d8e184, 0x2518e407, 0x05214800,
1424 0x05614800, 0x05a14800, 0x05e14800, 0x05214c00,
1425 0x05614c00, 0x05a14c00, 0x05e14c00, 0x05304001,
1426 0x05314001, 0x05a18610, 0x05e18610, 0x05271e11,
1427 0x45b0c210, 0x45f1c231, 0x1e601000, 0x1e603000,
1428 0x1e621000, 0x1e623000, 0x1e641000, 0x1e643000,
1429 0x1e661000, 0x1e663000, 0x1e681000, 0x1e683000,
1430 0x1e6a1000, 0x1e6a3000, 0x1e6c1000, 0x1e6c3000,
1431 0x1e6e1000, 0x1e6e3000, 0x1e701000, 0x1e703000,
1432 0x1e721000, 0x1e723000, 0x1e741000, 0x1e743000,
1433 0x1e761000, 0x1e763000, 0x1e781000, 0x1e783000,
1434 0x1e7a1000, 0x1e7a3000, 0x1e7c1000, 0x1e7c3000,
1435 0x1e7e1000, 0x1e7e3000, 0xf82c815f, 0xf8300047,
1436 0xf823126d, 0xf8312070, 0xf82133cb, 0xf82551e8,
1437 0xf83d401e, 0xf8347287, 0xf83762bc, 0xf8bb80b9,
1438 0xf8a10217, 0xf8bf1185, 0xf8a921fc, 0xf8bd33f6,
1439 0xf8b350bf, 0xf8ae43f0, 0xf8b0729b, 0xf8b0616c,
1440 0xf8e983c6, 0xf8f1039b, 0xf8fe1147, 0xf8f4208a,
1441 0xf8f83231, 0xf8f653a3, 0xf8ef4276, 0xf8f37056,
1442 0xf8ef6186, 0xf87081ab, 0xf87703c1, 0xf8731225,
1443 0xf86222d0, 0xf86d32aa, 0xf87d519b, 0xf87b4023,
1444 0xf87f7278, 0xf8716389, 0xb83b80ef, 0xb83503f7,
1445 0xb83913e2, 0xb83b2150, 0xb8373073, 0xb8305320,
1446 0xb83a4057, 0xb830708c, 0xb83c63be, 0xb8b080db,
1447 0xb8a901fd, 0xb8a710e4, 0xb8af22e9, 0xb8a83382,
1448 0xb8b550bf, 0xb8bb4220, 0xb8af7344, 0xb8a862dc,
1449 0xb8fb833b, 0xb8f70080, 0xb8e61010, 0xb8e4202f,
1450 0xb8ea30a7, 0xb8ea50fc, 0xb8f442b7, 0xb8e6710b,
1451 0xb8f160df, 0xb8718182, 0xb87e007d, 0xb87b13b6,
1452 0xb86e238d, 0xb87130b8, 0xb862514e, 0xb870436b,
1453 0xb877708c, 0xb8766091, 0xce304661, 0xce0c09cc,
1454 0xce748c70, 0xce863cb7, 0xce7b8191, 0xce668610,
1455 0xcec08382, 0xce668883, 0x25a0cdd1, 0x25a1c86c,
1456 0x058000b8, 0x054242ca, 0x0500051e, 0x2520cf00,
1457 0x25e1c951, 0x058039ea, 0x05400e1b, 0x05009891,
1458 0x2520c09c, 0x25a1d448, 0x05801e36, 0x05400516,
1459 0x050039fe, 0x2520ce0b, 0x25a1d0c8, 0x058074d9,
1460 0x05404531, 0x05031e84, 0x2560cf1a, 0x2561dda2,
1461 0x058026a3, 0x05404c35, 0x05007851, 0x25a0d293,
1462 0x25a1de96, 0x05808874, 0x05423bb1, 0x050030e4,
1463 0x04680102, 0x04be0638, 0x658103c4, 0x65800993,
1464 0x65910707, 0x04d6a53b, 0x04c00e17, 0x04da1696,
1465 0x049089bc, 0x045b1787, 0x0499ad6b, 0x049ab901,
1466 0x0499122d, 0x04538064, 0x04918dc7, 0x04100fa4,
1467 0x04d7aaa0, 0x049ea123, 0x04180b1c, 0x05e786f3,
1468 0x05e4954d, 0x048813cc, 0x048a03ae, 0x048114f5,
1469 0x04dca342, 0x65c09229, 0x65cd8440, 0x65c6856e,
1470 0x658793ae, 0x658282c3, 0x049dbb63, 0x65c2b4f3,
1471 0x6580acb5, 0x65c1a6b9, 0x658da071, 0x65818cf3,
1472 0x65ab922e, 0x65b113d8, 0x65f62f4f, 0x65e5a916,
1473 0x65eec81b, 0x65e3f415, 0x65fd4739, 0x65ee6191,
1474 0x04c2422d, 0x045d76b4, 0x04203048, 0x04a032d7,
1475 0x04773359, 0x04e132b5, 0x05ab6a6a, 0x05e86ef7,
1476 0x049a3671, 0x04d835a4, 0x04d93fd6, 0x044831d1,
1477 0x040a3e8c, 0x65872da1, 0x65c62967, 0x659839e4,
1478 0x04813c03,
1479 };
1480 // END Generated code -- do not edit
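// Context note (not part of the generated listing): each assembler call above is
// paired with its expected disassembly, and insns[] holds the expected 32-bit
// encodings in emission order. The sketch below is a hedged illustration of how
// such a table can be checked against the words an assembler actually emitted
// into a code buffer; it is not the generated file's own harness, and the names
// check_encodings/emitted are assumptions made for this example only.

#include <cstddef>
#include <cstdio>

// Compare the words emitted into a code buffer against the expected table,
// reporting every mismatching instruction index and encoding pair.
static bool check_encodings(const unsigned int* emitted,
                            const unsigned int* expected,
                            std::size_t count) {
  bool ok = true;
  for (std::size_t i = 0; i < count; i++) {
    if (emitted[i] != expected[i]) {
      std::printf("mismatch at insn %zu: expected 0x%08x, got 0x%08x\n",
                  i, expected[i], emitted[i]);
      ok = false;
    }
  }
  return ok;
}

// Usage sketch (code_start assumed to point at the first emitted instruction):
//   check_encodings(code_start, insns, sizeof insns / sizeof insns[0]);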