< prev index next >

src/hotspot/cpu/aarch64/assembler_aarch64.hpp

Print this page
@@ -3657,26 +3657,46 @@
  
    INSN(sve_lasta, 0b0);
    INSN(sve_lastb, 0b1);
  #undef INSN
  
-   // SVE create index starting from and incremented by immediate
+   // SVE Index Generation:
+   // Create index starting from and incremented by immediate
    void sve_index(FloatRegister Zd, SIMD_RegVariant T, int imm1, int imm2) {
      starti;
      // Q is not a valid element size for SVE INDEX (B/H/S/D only).
      assert(T != Q, "invalid size");
      f(0b00000100, 31, 24), f(T, 23, 22), f(0b1, 21);
      // imm2 = increment, imm1 = start value; both are signed 5-bit fields
      // (sf), so presumably callers keep them in [-16, 15] — TODO confirm.
      sf(imm2, 20, 16), f(0b010000, 15, 10);
      sf(imm1, 9, 5), rf(Zd, 0);
    }
  
+   // SVE Index Generation:
+   // Create index starting from general-purpose register and incremented by immediate
+   void sve_index(FloatRegister Zd, SIMD_RegVariant T, Register Rn, int imm) {
+     starti;
+     // Q is not a valid element size for SVE INDEX (B/H/S/D only).
+     assert(T != Q, "invalid size");
+     f(0b00000100, 31, 24), f(T, 23, 22), f(0b1, 21);
+     // imm = increment, a signed 5-bit field (sf), so presumably callers
+     // keep it in [-16, 15] — TODO confirm; Rn supplies the start value.
+     sf(imm, 20, 16), f(0b010001, 15, 10);
+     // zrf: general-purpose register field where XZR is a permitted encoding.
+     zrf(Rn, 5), rf(Zd, 0);
+   }
+ 
    // SVE programmable table lookup/permute using vector of element indices
    void sve_tbl(FloatRegister Zd, SIMD_RegVariant T, FloatRegister Zn, FloatRegister Zm) {
      starti;
      // Q is not a valid element size for SVE TBL (B/H/S/D only).
      assert(T != Q, "invalid size");
      // Zn holds the table, Zm holds the per-element indices into it.
      f(0b00000101, 31, 24), f(T, 23, 22), f(0b1, 21), rf(Zm, 16);
      f(0b001100, 15, 10), rf(Zn, 5), rf(Zd, 0);
    }
  
+   // Shuffle active elements of vector to the right and fill with zero
+   void sve_compact(FloatRegister Zd, SIMD_RegVariant T, FloatRegister Zn, PRegister Pg) {
+     starti;
+     // SVE COMPACT is architecturally defined for word (S) and
+     // doubleword (D) elements only.
+     assert(T == S || T == D, "invalid size");
+     f(0b00000101, 31, 24), f(T, 23, 22), f(0b100001100, 21, 13);
+     // Pg selects the active elements of Zn to pack into the low
+     // elements of Zd; the remainder of Zd is zero-filled.
+     pgrf(Pg, 10), rf(Zn, 5), rf(Zd, 0);
+   }
+ 
    // Construct an assembler that emits instructions into the given
    // code buffer; all state lives in the AbstractAssembler base.
    Assembler(CodeBuffer* code) : AbstractAssembler(code) {
    }
  
    // Stack overflow checking: probe (write to) the stack at the given
    // offset so a guard-page fault is taken eagerly rather than deep in a
    // frame — definition not visible here; see the corresponding .cpp.
    virtual void bang_stack_with_offset(int offset);
< prev index next >