src/hotspot/cpu/aarch64/aarch64_ad.m4

 137 BOTH_INVERTED_INSNS(And, bic)
 138 BOTH_INVERTED_INSNS(Or, orn)
 139 BOTH_INVERTED_INSNS(Xor, eon)
 140 ALL_INVERTED_SHIFT_KINDS(And, bic)
 141 ALL_INVERTED_SHIFT_KINDS(Xor, eon)
 142 ALL_INVERTED_SHIFT_KINDS(Or, orn)
 143 ALL_SHIFT_KINDS(And, andr)
 144 ALL_SHIFT_KINDS(Xor, eor)
 145 ALL_SHIFT_KINDS(Or, orr)
 146 ALL_SHIFT_KINDS(Add, add)
 147 ALL_SHIFT_KINDS(Sub, sub)
 148 dnl
 149 dnl EXTEND mode, rshift_op, src, lshift_count, rshift_count
 150 define(`EXTEND', `($2$1 (LShift$1 $3 $4) $5)')
 151 define(`BFM_INSN',`
 152 // Shift Left followed by Shift Right.
 153 // This idiom is used by the compiler for the i2b bytecode etc.
 154 instruct $4$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src, immI lshift_count, immI rshift_count)
 155 %{
 156   match(Set dst EXTEND($1, $3, src, lshift_count, rshift_count));
 157   ins_cost(INSN_COST * 2);
 158   format %{ "$4  $dst, $src, $rshift_count - $lshift_count, #$2 - $lshift_count" %}
 159   ins_encode %{
 160     int lshift = $lshift_count$$constant & $2;
 161     int rshift = $rshift_count$$constant & $2;
 162     int s = $2 - lshift;
 163     int r = (rshift - lshift) & $2;
 164     __ $4(as_Register($dst$$reg),
 165             as_Register($src$$reg),
 166             r, s);
 167   %}
 168 
 169   ins_pipe(ialu_reg_shift);
 170 %}')
 171 BFM_INSN(L, 63, RShift, sbfm)
 172 BFM_INSN(I, 31, RShift, sbfmw)
 173 BFM_INSN(L, 63, URShift, ubfm)
 174 BFM_INSN(I, 31, URShift, ubfmw)
 175 dnl
 176 // Bitfield extract with shift & mask
 177 define(`BFX_INSN',
 178 `instruct $3$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src, immI rshift, imm$1_bitmask mask)
 179 %{
 180   match(Set dst (And$1 ($2$1 src rshift) mask));
 181   // Make sure we are not going to exceed what $3 can do.
 182   predicate((exact_log2$6(n->in(2)->get_$5() + 1) + (n->in(1)->in(2)->get_int() & $4)) <= ($4 + 1));
 183 
 184   ins_cost(INSN_COST);
 185   format %{ "$3 $dst, $src, $rshift, $mask" %}
 186   ins_encode %{
 187     int rshift = $rshift$$constant & $4;
 188     long mask = $mask$$constant;
 189     int width = exact_log2$6(mask+1);
 190     __ $3(as_Register($dst$$reg),
 191             as_Register($src$$reg), rshift, width);
 192   %}
 193   ins_pipe(ialu_reg_shift);
 194 %}')
 195 BFX_INSN(I, URShift, ubfxw, 31, int)
 196 BFX_INSN(L, URShift, ubfx,  63, long, _long)
 197 
 198 // We can use ubfx when extending an And with a mask when we know mask
 199 // is positive.  We know that because immI_bitmask guarantees it.
 200 instruct ubfxIConvI2L(iRegLNoSp dst, iRegIorL2I src, immI rshift, immI_bitmask mask)
 201 %{
 202   match(Set dst (ConvI2L (AndI (URShiftI src rshift) mask)));
 203   // Make sure we are not going to exceed what ubfxw can do.
 204   predicate((exact_log2(n->in(1)->in(2)->get_int() + 1) + (n->in(1)->in(1)->in(2)->get_int() & 31)) <= (31 + 1));
 205 
 206   ins_cost(INSN_COST * 2);
 207   format %{ "ubfx $dst, $src, $rshift, $mask" %}
 208   ins_encode %{
 209     int rshift = $rshift$$constant & 31;
 210     long mask = $mask$$constant;
 211     int width = exact_log2(mask+1);
 212     __ ubfx(as_Register($dst$$reg),
 213             as_Register($src$$reg), rshift, width);
 214   %}
 215   ins_pipe(ialu_reg_shift);
 216 %}
 217 
 218 define(`UBFIZ_INSN',
 219 // We can use ubfiz when masking by a positive number and then left shifting the result.
 220 // We know that the mask is positive because imm$1_bitmask guarantees it.
 221 `instruct $2$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src, immI lshift, imm$1_bitmask mask)
 222 %{
 223   match(Set dst (LShift$1 (And$1 src mask) lshift));
 224   predicate((exact_log2$5(n->in(1)->in(2)->get_$4() + 1) + (n->in(2)->get_int() & $3)) <= ($3 + 1));
 225 
 226   ins_cost(INSN_COST);
 227   format %{ "$2 $dst, $src, $lshift, $mask" %}
 228   ins_encode %{
 229     int lshift = $lshift$$constant & $3;
 230     long mask = $mask$$constant;
 231     int width = exact_log2$5(mask+1);
 232     __ $2(as_Register($dst$$reg),
 233           as_Register($src$$reg), lshift, width);
 234   %}
 235   ins_pipe(ialu_reg_shift);
 236 %}')
 237 UBFIZ_INSN(I, ubfizw, 31, int)
 238 UBFIZ_INSN(L, ubfiz,  63, long, _long)
 239 
 240 // If there is a convert I to L block between an AndI and a LShiftL, we can also match ubfiz
 241 instruct ubfizIConvI2L(iRegLNoSp dst, iRegIorL2I src, immI lshift, immI_bitmask mask)
 242 %{
 243   match(Set dst (LShiftL (ConvI2L (AndI src mask)) lshift));
 244   predicate((exact_log2(n->in(1)->in(1)->in(2)->get_int() + 1) + (n->in(2)->get_int() & 63)) <= (63 + 1));
 245 
 246   ins_cost(INSN_COST);
 247   format %{ "ubfiz $dst, $src, $lshift, $mask" %}
 248   ins_encode %{
 249     int lshift = $lshift$$constant & 63;
 250     long mask = $mask$$constant;
 251     int width = exact_log2(mask+1);
 252     __ ubfiz(as_Register($dst$$reg),
 253              as_Register($src$$reg), lshift, width);
 254   %}
 255   ins_pipe(ialu_reg_shift);
 256 %}
 257 
 258 // Rotations
 259 
 260 define(`EXTRACT_INSN',
 261 `instruct extr$3$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src1, iReg$1`'ORL2I($1) src2, immI lshift, immI rshift, rFlagsReg cr)
 262 %{
 263   match(Set dst ($3$1 (LShift$1 src1 lshift) (URShift$1 src2 rshift)));
 264   predicate(0 == (((n->in(1)->in(2)->get_int() & $2) + (n->in(2)->in(2)->get_int() & $2)) & $2));
 265 
 266   ins_cost(INSN_COST);
 267   format %{ "extr $dst, $src1, $src2, #$rshift" %}
 268 
 269   ins_encode %{
 270     __ $4(as_Register($dst$$reg), as_Register($src1$$reg), as_Register($src2$$reg),
 271             $rshift$$constant & $2);
 272   %}
 273   ins_pipe(ialu_reg_reg_extr);
 274 %}
 275 ')dnl
 276 EXTRACT_INSN(L, 63, Or, extr)
 277 EXTRACT_INSN(I, 31, Or, extrw)
 278 EXTRACT_INSN(L, 63, Add, extr)
 279 EXTRACT_INSN(I, 31, Add, extrw)
 280 define(`ROL_EXPAND', `
 281 // $2 expander
 282 
 283 instruct $2$1_rReg(iReg$1NoSp dst, iReg$1 src, iRegI shift, rFlagsReg cr)
 284 %{

 137 BOTH_INVERTED_INSNS(And, bic)
 138 BOTH_INVERTED_INSNS(Or, orn)
 139 BOTH_INVERTED_INSNS(Xor, eon)
 140 ALL_INVERTED_SHIFT_KINDS(And, bic)
 141 ALL_INVERTED_SHIFT_KINDS(Xor, eon)
 142 ALL_INVERTED_SHIFT_KINDS(Or, orn)
 143 ALL_SHIFT_KINDS(And, andr)
 144 ALL_SHIFT_KINDS(Xor, eor)
 145 ALL_SHIFT_KINDS(Or, orr)
 146 ALL_SHIFT_KINDS(Add, add)
 147 ALL_SHIFT_KINDS(Sub, sub)
 148 dnl
 149 dnl EXTEND mode, rshift_op, src, lshift_count, rshift_count
 150 define(`EXTEND', `($2$1 (LShift$1 $3 $4) $5)')
 151 define(`BFM_INSN',`
 152 // Shift Left followed by Shift Right.
 153 // This idiom is used by the compiler for the i2b bytecode etc.
 154 instruct $4$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src, immI lshift_count, immI rshift_count)
 155 %{
 156   match(Set dst EXTEND($1, $3, src, lshift_count, rshift_count));
 157   // Make sure we are not going to exceed what $4 can do.
 158   predicate((unsigned int)n->in(2)->get_int() <= $2
 159             && (unsigned int)n->in(1)->in(2)->get_int() <= $2);
 160 
 161   ins_cost(INSN_COST * 2);
 162   format %{ "$4  $dst, $src, $rshift_count - $lshift_count, #$2 - $lshift_count" %}
 163   ins_encode %{
 164     int lshift = $lshift_count$$constant, rshift = $rshift_count$$constant;
 165     int s = $2 - lshift;
 166     int r = (rshift - lshift) & $2;
 167     __ $4(as_Register($dst$$reg),
 168             as_Register($src$$reg),
 169             r, s);
 170   %}
 171 
 172   ins_pipe(ialu_reg_shift);
 173 %}')
 174 BFM_INSN(L, 63, RShift, sbfm)
 175 BFM_INSN(I, 31, RShift, sbfmw)
 176 BFM_INSN(L, 63, URShift, ubfm)
 177 BFM_INSN(I, 31, URShift, ubfmw)
 178 dnl
 179 // Bitfield extract with shift & mask
 180 define(`BFX_INSN',
 181 `instruct $3$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src, immI rshift, imm$1_bitmask mask)
 182 %{
 183   match(Set dst (And$1 ($2$1 src rshift) mask));
 184 
 185   ins_cost(INSN_COST);
 186   format %{ "$3 $dst, $src, $rshift, $mask" %}
 187   ins_encode %{
 188     int rshift = $rshift$$constant;
 189     long mask = $mask$$constant;
 190     int width = exact_log2(mask+1);
 191     __ $3(as_Register($dst$$reg),
 192             as_Register($src$$reg), rshift, width);
 193   %}
 194   ins_pipe(ialu_reg_shift);
 195 %}')
 196 BFX_INSN(I,URShift,ubfxw)
 197 BFX_INSN(L,URShift,ubfx)
 198 
 199 // We can use ubfx when extending an And with a mask when we know mask
 200 // is positive.  We know that because immI_bitmask guarantees it.
 201 instruct ubfxIConvI2L(iRegLNoSp dst, iRegIorL2I src, immI rshift, immI_bitmask mask)
 202 %{
 203   match(Set dst (ConvI2L (AndI (URShiftI src rshift) mask)));
 204 
 205   ins_cost(INSN_COST * 2);
 206   format %{ "ubfx $dst, $src, $rshift, $mask" %}
 207   ins_encode %{
 208     int rshift = $rshift$$constant;
 209     long mask = $mask$$constant;
 210     int width = exact_log2(mask+1);
 211     __ ubfx(as_Register($dst$$reg),
 212             as_Register($src$$reg), rshift, width);
 213   %}
 214   ins_pipe(ialu_reg_shift);
 215 %}
 216 
 217 define(`UBFIZ_INSN',
 218 // We can use ubfiz when masking by a positive number and then left shifting the result.
 219 // We know that the mask is positive because imm$1_bitmask guarantees it.
 220 `instruct $2$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src, immI lshift, imm$1_bitmask mask)
 221 %{
 222   match(Set dst (LShift$1 (And$1 src mask) lshift));
 223   predicate((unsigned int)n->in(2)->get_int() <= $3 &&
 224     (exact_log2$5(n->in(1)->in(2)->get_$4()+1) + (unsigned int)n->in(2)->get_int()) <= ($3+1));
 225 
 226   ins_cost(INSN_COST);
 227   format %{ "$2 $dst, $src, $lshift, $mask" %}
 228   ins_encode %{
 229     int lshift = $lshift$$constant;
 230     long mask = $mask$$constant;
 231     int width = exact_log2(mask+1);
 232     __ $2(as_Register($dst$$reg),
 233           as_Register($src$$reg), lshift, width);
 234   %}
 235   ins_pipe(ialu_reg_shift);
 236 %}')
 237 UBFIZ_INSN(I, ubfizw, 31, int)
 238 UBFIZ_INSN(L, ubfiz, 63, long, _long)
 239 
 240 // If there is a convert I to L block between an AndI and a LShiftL, we can also match ubfiz
 241 instruct ubfizIConvI2L(iRegLNoSp dst, iRegIorL2I src, immI lshift, immI_bitmask mask)
 242 %{
 243   match(Set dst (LShiftL (ConvI2L(AndI src mask)) lshift));
 244   predicate((unsigned int)n->in(2)->get_int() <= 31 &&
 245     (exact_log2((unsigned int)n->in(1)->in(1)->in(2)->get_int()+1) + (unsigned int)n->in(2)->get_int()) <= 32);
 246 
 247   ins_cost(INSN_COST);
 248   format %{ "ubfiz $dst, $src, $lshift, $mask" %}
 249   ins_encode %{
 250     int lshift = $lshift$$constant;
 251     long mask = $mask$$constant;
 252     int width = exact_log2(mask+1);
 253     __ ubfiz(as_Register($dst$$reg),
 254              as_Register($src$$reg), lshift, width);
 255   %}
 256   ins_pipe(ialu_reg_shift);
 257 %}
 258 
 259 // Rotations
 260 
 261 define(`EXTRACT_INSN',
 262 `instruct extr$3$1(iReg$1NoSp dst, iReg$1`'ORL2I($1) src1, iReg$1`'ORL2I($1) src2, immI lshift, immI rshift, rFlagsReg cr)
 263 %{
 264   match(Set dst ($3$1 (LShift$1 src1 lshift) (URShift$1 src2 rshift)));
 265   predicate(0 == ((n->in(1)->in(2)->get_int() + n->in(2)->in(2)->get_int()) & $2));
 266 
 267   ins_cost(INSN_COST);
 268   format %{ "extr $dst, $src1, $src2, #$rshift" %}
 269 
 270   ins_encode %{
 271     __ $4(as_Register($dst$$reg), as_Register($src1$$reg), as_Register($src2$$reg),
 272             $rshift$$constant & $2);
 273   %}
 274   ins_pipe(ialu_reg_reg_extr);
 275 %}
 276 ')dnl
 277 EXTRACT_INSN(L, 63, Or, extr)
 278 EXTRACT_INSN(I, 31, Or, extrw)
 279 EXTRACT_INSN(L, 63, Add, extr)
 280 EXTRACT_INSN(I, 31, Add, extrw)
 281 define(`ROL_EXPAND', `
 282 // $2 expander
 283 
 284 instruct $2$1_rReg(iReg$1NoSp dst, iReg$1 src, iRegI shift, rFlagsReg cr)
 285 %{
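
These m4 macros expand into ADL instruct definitions. As a minimal hand-expanded sketch (an illustration, not part of the patch), BFM_INSN(I, 31, RShift, sbfmw) from the first listing above becomes the following, assuming ORL2I(I) expands to orL2I as the iRegIorL2I operands elsewhere on this page suggest:

instruct sbfmwI(iRegINoSp dst, iRegIorL2I src, immI lshift_count, immI rshift_count)
%{
  match(Set dst (RShiftI (LShiftI src lshift_count) rshift_count));
  ins_cost(INSN_COST * 2);
  format %{ "sbfmw  $dst, $src, $rshift_count - $lshift_count, #31 - $lshift_count" %}
  ins_encode %{
    // Shift counts are masked to the I operand width (0..31).
    int lshift = $lshift_count$$constant & 31;
    int rshift = $rshift_count$$constant & 31;
    int s = 31 - lshift;             // imms field of SBFM
    int r = (rshift - lshift) & 31;  // immr field of SBFM
    __ sbfmw(as_Register($dst$$reg),
             as_Register($src$$reg),
             r, s);
  %}
  ins_pipe(ialu_reg_shift);
%}

For the i2b idiom ((x << 24) >> 24), lshift = rshift = 24, giving r = 0 and s = 7, i.e. sbfmw dst, src, #0, #7, the sign-extend-byte form of SBFM.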