71
// Packed element-wise min/max; 'opcode' selects min vs. max, 'elem_bt' is the
// element type. 'tmp' is an optional scratch XMM register (defaults to xnoreg).
72 void pminmax(int opcode, BasicType elem_bt, XMMRegister dst, XMMRegister src,
73 XMMRegister tmp = xnoreg);
// Three-operand vector variant: dst = min/max(src1, src2); 'vlen_enc' encodes
// the vector length.
74 void vpminmax(int opcode, BasicType elem_bt,
75 XMMRegister dst, XMMRegister src1, XMMRegister src2,
76 int vlen_enc);
77
// Floating-point min/max with scratch XMM registers tmp/atmp/btmp.
// NOTE(review): presumably implements Java Math.min/max semantics for
// NaN and -0.0 -- confirm against the matching .cpp implementation.
78 void vminmax_fp(int opcode, BasicType elem_bt,
79 XMMRegister dst, XMMRegister a, XMMRegister b,
80 XMMRegister tmp, XMMRegister atmp, XMMRegister btmp,
81 int vlen_enc);
// AVX-512 (EVEX) variant of the above; uses an opmask register 'ktmp'
// instead of an XMM scratch for the blend.
82 void evminmax_fp(int opcode, BasicType elem_bt,
83 XMMRegister dst, XMMRegister a, XMMRegister b,
84 KRegister ktmp, XMMRegister atmp, XMMRegister btmp,
85 int vlen_enc);
86
// Floating-point signum; 'zero' and 'one' are XMM registers expected to hold
// the constants 0.0 and 1.0, 'scratch' is a GP scratch register.
87 void signum_fp(int opcode, XMMRegister dst,
88 XMMRegister zero, XMMRegister one,
89 Register scratch);
90
// Packed widening conversions: byte->word (bw), byte->dword (bd),
// word->dword (wd). 'sign' selects sign- vs. zero-extension.
91 void vextendbw(bool sign, XMMRegister dst, XMMRegister src, int vector_len);
92 void vextendbw(bool sign, XMMRegister dst, XMMRegister src);
93 void vextendbd(bool sign, XMMRegister dst, XMMRegister src, int vector_len);
94 void vextendwd(bool sign, XMMRegister dst, XMMRegister src, int vector_len);
95
// Packed shifts of dword (d), word (w) and qword (q) elements; 'opcode'
// selects the shift kind. The *_imm variants take an immediate shift count,
// the others take the count in an XMM register.
96 void vshiftd(int opcode, XMMRegister dst, XMMRegister shift);
97 void vshiftd_imm(int opcode, XMMRegister dst, int shift);
98 void vshiftd(int opcode, XMMRegister dst, XMMRegister src, XMMRegister shift, int vlen_enc);
99 void vshiftd_imm(int opcode, XMMRegister dst, XMMRegister nds, int shift, int vector_len);
100 void vshiftw(int opcode, XMMRegister dst, XMMRegister shift);
101 void vshiftw(int opcode, XMMRegister dst, XMMRegister src, XMMRegister shift, int vlen_enc);
102 void vshiftq(int opcode, XMMRegister dst, XMMRegister shift);
103 void vshiftq_imm(int opcode, XMMRegister dst, int shift);
104 void vshiftq(int opcode, XMMRegister dst, XMMRegister src, XMMRegister shift, int vlen_enc);
105 void vshiftq_imm(int opcode, XMMRegister dst, XMMRegister nds, int shift, int vector_len);
106
// Packed rotates on elements of type 'etype': immediate rotate amount (_imm)
// or per-element variable amount held in 'shift' (_var).
107 void vprotate_imm(int opcode, BasicType etype, XMMRegister dst, XMMRegister src, int shift, int vector_len);
108 void vprotate_var(int opcode, BasicType etype, XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len);
109
// Variable (per-element) shift of dword elements.
110 void varshiftd(int opcode, XMMRegister dst, XMMRegister src, XMMRegister shift, int vlen_enc);
323 #ifdef _LP64
// Vector Math.round for doubles using AVX-512 (EVEX) encodings; temporarily
// installs 'new_mxcsr' to force the required rounding mode and uses
// 'double_sign_flip' to handle sign/overflow cases.
324 void vector_round_double_evex(XMMRegister dst, XMMRegister src, XMMRegister xtmp1, XMMRegister xtmp2,
325 KRegister ktmp1, KRegister ktmp2, AddressLiteral double_sign_flip,
326 AddressLiteral new_mxcsr, Register scratch, int vec_enc);
327
// Vector Math.round for floats (EVEX). Parameter renamed from
// 'double_sign_flip' (copy-paste from the double variant): this routine
// operates on floats, matching vector_round_float_avx below.
328 void vector_round_float_evex(XMMRegister dst, XMMRegister src, XMMRegister xtmp1, XMMRegister xtmp2,
329 KRegister ktmp1, KRegister ktmp2, AddressLiteral float_sign_flip,
330 AddressLiteral new_mxcsr, Register scratch, int vec_enc);
331
// Vector Math.round for floats on AVX (no opmask registers); uses XMM
// scratch registers xtmp3/xtmp4 instead.
332 void vector_round_float_avx(XMMRegister dst, XMMRegister src, XMMRegister xtmp1, XMMRegister xtmp2,
333 XMMRegister xtmp3, XMMRegister xtmp4, AddressLiteral float_sign_flip,
334 AddressLiteral new_mxcsr, Register scratch, int vec_enc);
335 #endif
336
// AVX-512 ternary logic (VPTERNLOG): 'func' is the 8-bit truth-table
// immediate; 'mask'/'merge' control masked merging vs. zeroing writes.
337 void evpternlog(XMMRegister dst, int func, KRegister mask, XMMRegister src2, XMMRegister src3,
338 bool merge, BasicType bt, int vlen_enc);
339
// Memory-operand overload: third source comes from 'src3' in memory.
340 void evpternlog(XMMRegister dst, int func, KRegister mask, XMMRegister src2, Address src3,
341 bool merge, BasicType bt, int vlen_enc);
342
// Unsigned 32-bit divide/modulo; parameter names mirror the fixed
// rax/rdx operands of the x86 DIV instruction.
343 void udivI(Register rax, Register divisor, Register rdx);
344 void umodI(Register rax, Register divisor, Register rdx);
345 void udivmodI(Register rax, Register divisor, Register rdx, Register tmp);
346
// 64-bit variants (64-bit VM only).
347 #ifdef _LP64
348 void udivL(Register rax, Register divisor, Register rdx);
349 void umodL(Register rax, Register divisor, Register rdx);
350 void udivmodL(Register rax, Register divisor, Register rdx, Register tmp);
351 #endif
// Per-element population count for int and long vectors; xtmp1..xtmp3 and
// rtmp are scratch registers.
352 void vector_popcount_int(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
353 XMMRegister xtmp2, XMMRegister xtmp3, Register rtmp,
354 int vec_enc);
355
356 void vector_popcount_long(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
357 XMMRegister xtmp2, XMMRegister xtmp3, Register rtmp,
358 int vec_enc);
359
360 #endif // CPU_X86_C2_MACROASSEMBLER_X86_HPP
|
71
// Packed element-wise min/max; 'opcode' selects min vs. max, 'elem_bt' is the
// element type. 'tmp' is an optional scratch XMM register (defaults to xnoreg).
72 void pminmax(int opcode, BasicType elem_bt, XMMRegister dst, XMMRegister src,
73 XMMRegister tmp = xnoreg);
// Three-operand vector variant: dst = min/max(src1, src2); 'vlen_enc' encodes
// the vector length.
74 void vpminmax(int opcode, BasicType elem_bt,
75 XMMRegister dst, XMMRegister src1, XMMRegister src2,
76 int vlen_enc);
77
// Floating-point min/max with scratch XMM registers tmp/atmp/btmp.
// NOTE(review): presumably implements Java Math.min/max semantics for
// NaN and -0.0 -- confirm against the matching .cpp implementation.
78 void vminmax_fp(int opcode, BasicType elem_bt,
79 XMMRegister dst, XMMRegister a, XMMRegister b,
80 XMMRegister tmp, XMMRegister atmp, XMMRegister btmp,
81 int vlen_enc);
// AVX-512 (EVEX) variant of the above; uses an opmask register 'ktmp'
// instead of an XMM scratch for the blend.
82 void evminmax_fp(int opcode, BasicType elem_bt,
83 XMMRegister dst, XMMRegister a, XMMRegister b,
84 KRegister ktmp, XMMRegister atmp, XMMRegister btmp,
85 int vlen_enc);
86
// Floating-point signum; 'zero' and 'one' are XMM registers expected to hold
// the constants 0.0 and 1.0, 'scratch' is a GP scratch register.
87 void signum_fp(int opcode, XMMRegister dst,
88 XMMRegister zero, XMMRegister one,
89 Register scratch);
90
// Vector compress/expand under an AVX-512 opmask: 'opcode' selects compress
// vs. expand, 'merge' controls merging vs. zeroing of inactive lanes.
91 void vector_compress_expand(int opcode, XMMRegister dst, XMMRegister src, KRegister mask,
92 bool merge, BasicType bt, int vec_enc);
93
// Compress the set bits of mask register 'src' into 'dst'; rtmp1/rtmp2 are
// GP scratch registers, 'mask_len' is the number of mask bits.
94 void vector_mask_compress(KRegister dst, KRegister src, Register rtmp1, Register rtmp2, int mask_len);
95
// Packed widening conversions: byte->word (bw), byte->dword (bd),
// word->dword (wd). 'sign' selects sign- vs. zero-extension.
96 void vextendbw(bool sign, XMMRegister dst, XMMRegister src, int vector_len);
97 void vextendbw(bool sign, XMMRegister dst, XMMRegister src);
98 void vextendbd(bool sign, XMMRegister dst, XMMRegister src, int vector_len);
99 void vextendwd(bool sign, XMMRegister dst, XMMRegister src, int vector_len);
100
// Packed shifts of dword (d), word (w) and qword (q) elements; 'opcode'
// selects the shift kind. The *_imm variants take an immediate shift count,
// the others take the count in an XMM register.
101 void vshiftd(int opcode, XMMRegister dst, XMMRegister shift);
102 void vshiftd_imm(int opcode, XMMRegister dst, int shift);
103 void vshiftd(int opcode, XMMRegister dst, XMMRegister src, XMMRegister shift, int vlen_enc);
104 void vshiftd_imm(int opcode, XMMRegister dst, XMMRegister nds, int shift, int vector_len);
105 void vshiftw(int opcode, XMMRegister dst, XMMRegister shift);
106 void vshiftw(int opcode, XMMRegister dst, XMMRegister src, XMMRegister shift, int vlen_enc);
107 void vshiftq(int opcode, XMMRegister dst, XMMRegister shift);
108 void vshiftq_imm(int opcode, XMMRegister dst, int shift);
109 void vshiftq(int opcode, XMMRegister dst, XMMRegister src, XMMRegister shift, int vlen_enc);
110 void vshiftq_imm(int opcode, XMMRegister dst, XMMRegister nds, int shift, int vector_len);
111
// Packed rotates on elements of type 'etype': immediate rotate amount (_imm)
// or per-element variable amount held in 'shift' (_var).
112 void vprotate_imm(int opcode, BasicType etype, XMMRegister dst, XMMRegister src, int shift, int vector_len);
113 void vprotate_var(int opcode, BasicType etype, XMMRegister dst, XMMRegister src, XMMRegister shift, int vector_len);
114
// Variable (per-element) shift of dword elements.
115 void varshiftd(int opcode, XMMRegister dst, XMMRegister src, XMMRegister shift, int vlen_enc);
328 #ifdef _LP64
// Vector Math.round for doubles using AVX-512 (EVEX) encodings; temporarily
// installs 'new_mxcsr' to force the required rounding mode and uses
// 'double_sign_flip' to handle sign/overflow cases.
329 void vector_round_double_evex(XMMRegister dst, XMMRegister src, XMMRegister xtmp1, XMMRegister xtmp2,
330 KRegister ktmp1, KRegister ktmp2, AddressLiteral double_sign_flip,
331 AddressLiteral new_mxcsr, Register scratch, int vec_enc);
332
// Vector Math.round for floats (EVEX). Parameter renamed from
// 'double_sign_flip' (copy-paste from the double variant): this routine
// operates on floats, matching vector_round_float_avx below.
333 void vector_round_float_evex(XMMRegister dst, XMMRegister src, XMMRegister xtmp1, XMMRegister xtmp2,
334 KRegister ktmp1, KRegister ktmp2, AddressLiteral float_sign_flip,
335 AddressLiteral new_mxcsr, Register scratch, int vec_enc);
336
// Vector Math.round for floats on AVX (no opmask registers); uses XMM
// scratch registers xtmp3/xtmp4 instead.
337 void vector_round_float_avx(XMMRegister dst, XMMRegister src, XMMRegister xtmp1, XMMRegister xtmp2,
338 XMMRegister xtmp3, XMMRegister xtmp4, AddressLiteral float_sign_flip,
339 AddressLiteral new_mxcsr, Register scratch, int vec_enc);
340 #endif
341
// AVX-512 ternary logic (VPTERNLOG): 'func' is the 8-bit truth-table
// immediate; 'mask'/'merge' control masked merging vs. zeroing writes.
342 void evpternlog(XMMRegister dst, int func, KRegister mask, XMMRegister src2, XMMRegister src3,
343 bool merge, BasicType bt, int vlen_enc);
344
// Memory-operand overload: third source comes from 'src3' in memory.
345 void evpternlog(XMMRegister dst, int func, KRegister mask, XMMRegister src2, Address src3,
346 bool merge, BasicType bt, int vlen_enc);
347
// Reverse the bit order within each element of type 'bt'.
348 void vector_reverse_bit(BasicType bt, XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
349 XMMRegister xtmp2, Register rtmp, int vec_enc);
350
// GFNI-accelerated bit reversal; 'mask' points to the Galois-field affine
// constant used by the GF2P8AFFINE-based sequence.
351 void vector_reverse_bit_gfni(BasicType bt, XMMRegister dst, XMMRegister src, XMMRegister xtmp,
352 AddressLiteral mask, Register rtmp, int vec_enc);
353
// Reverse the byte order within each element of type 'bt'.
354 void vector_reverse_byte(BasicType bt, XMMRegister dst, XMMRegister src, Register rtmp, int vec_enc);
355
// Unsigned 32-bit divide/modulo; parameter names mirror the fixed
// rax/rdx operands of the x86 DIV instruction.
356 void udivI(Register rax, Register divisor, Register rdx);
357 void umodI(Register rax, Register divisor, Register rdx);
358 void udivmodI(Register rax, Register divisor, Register rdx, Register tmp);
359
// 64-bit variants (64-bit VM only).
360 #ifdef _LP64
361 void udivL(Register rax, Register divisor, Register rdx);
362 void umodL(Register rax, Register divisor, Register rdx);
363 void udivmodL(Register rax, Register divisor, Register rdx, Register tmp);
364 #endif
365
// Per-element population count for each element width; xtmp1/xtmp2 and rtmp
// are scratch registers.
366 void vector_popcount_int(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
367 XMMRegister xtmp2, Register rtmp, int vec_enc);
368
369 void vector_popcount_long(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
370 XMMRegister xtmp2, Register rtmp, int vec_enc);
371
372 void vector_popcount_short(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
373 XMMRegister xtmp2, Register rtmp, int vec_enc);
374
375 void vector_popcount_byte(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
376 XMMRegister xtmp2, Register rtmp, int vec_enc);
377
// Type-dispatched front end: selects one of the width-specific routines
// above based on 'bt'.
378 void vector_popcount_integral(BasicType bt, XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
379 XMMRegister xtmp2, Register rtmp, int vec_enc);
380
// AVX-512 variant with masked/merging semantics via 'mask' and 'merge'.
381 void vector_popcount_integral_evex(BasicType bt, XMMRegister dst, XMMRegister src,
382 KRegister mask, bool merge, int vec_enc);
383
// Broadcast the immediate 'imm32' to all lanes of 'dst'; 'rtmp' is a GP
// scratch register used to stage the value.
384 void vbroadcast(BasicType bt, XMMRegister dst, int imm32, Register rtmp, int vec_enc);
385
// Byte reversal variant operating on 64-bit lanes.
// NOTE(review): exact lane handling not visible here -- see matching .cpp.
386 void vector_reverse_byte64(BasicType bt, XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
387 XMMRegister xtmp2, Register rtmp, int vec_enc);
388
389
// Per-element count-leading-zeros, AVX-512 path (uses an opmask scratch).
390 void vector_count_leading_zeros_evex(BasicType bt, XMMRegister dst, XMMRegister src,
391 XMMRegister xtmp1, XMMRegister xtmp2, XMMRegister xtmp3,
392 KRegister ktmp, Register rtmp, bool merge, int vec_enc);
393
// AVX fallbacks, one per element width.
394 void vector_count_leading_zeros_byte_avx(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
395 XMMRegister xtmp2, XMMRegister xtmp3, Register rtmp, int vec_enc);
396
397 void vector_count_leading_zeros_short_avx(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
398 XMMRegister xtmp2, XMMRegister xtmp3, Register rtmp, int vec_enc);
399
400 void vector_count_leading_zeros_int_avx(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
401 XMMRegister xtmp2, XMMRegister xtmp3, int vec_enc);
402
403 void vector_count_leading_zeros_long_avx(XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
404 XMMRegister xtmp2, XMMRegister xtmp3, Register rtmp, int vec_enc);
405
// Type-dispatched front end for the AVX fallbacks above.
406 void vector_count_leading_zeros_avx(BasicType bt, XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
407 XMMRegister xtmp2, XMMRegister xtmp3, Register rtmp, int vec_enc);
408
// Type-dispatched packed add/subtract: dst = src1 +/- src2 per element of
// type 'bt'.
409 void vpadd(BasicType bt, XMMRegister dst, XMMRegister src1, XMMRegister src2, int vec_enc);
410
411 void vpsub(BasicType bt, XMMRegister dst, XMMRegister src1, XMMRegister src2, int vec_enc);
412
// Per-element count-trailing-zeros: AVX-512 path (opmask scratch 'ktmp')
// and AVX fallback.
413 void vector_count_trailing_zeros_evex(BasicType bt, XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
414 XMMRegister xtmp2, XMMRegister xtmp3, XMMRegister xtmp4, KRegister ktmp,
415 Register rtmp, int vec_enc);
416
417 void vector_count_trailing_zeros_avx(BasicType bt, XMMRegister dst, XMMRegister src, XMMRegister xtmp1,
418 XMMRegister xtmp2, XMMRegister xtmp3, Register rtmp, int vec_enc);
419
420 #endif // CPU_X86_C2_MACROASSEMBLER_X86_HPP
|