 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package jdk.incubator.vector;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Objects;
import java.util.function.IntUnaryOperator;

import jdk.internal.vm.annotation.ForceInline;
import jdk.internal.vm.vector.VectorSupport;

import static jdk.internal.vm.vector.VectorSupport.*;

import static jdk.incubator.vector.VectorOperators.*;

// -- This file was mechanically generated: Do not edit! -- //

@SuppressWarnings("cast") // warning: redundant cast
final class Int128Vector extends IntVector {
    static final IntSpecies VSPECIES =
        (IntSpecies) IntVector.SPECIES_128;

    static final VectorShape VSHAPE =
        VSPECIES.vectorShape();

    static final Class<Int128Vector> VCLASS = Int128Vector.class;

    static final int VSIZE = VSPECIES.vectorBitSize();
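
    // The constants above pin this concrete class to the 128-bit shape:
    // with 32-bit int lanes, VSPECIES.length() is 4. Code normally reaches
    // this class only through the public species, e.g. (illustrative only):
    //   IntVector v = IntVector.fromArray(IntVector.SPECIES_128, src, 0);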

    // ...

    @Override
    @ForceInline
    public Int128Vector rearrange(VectorShuffle<Integer> shuffle,
                                  VectorMask<Integer> m) {
        return (Int128Vector)
            super.rearrangeTemplate(Int128Shuffle.class,
                                    Int128Mask.class,
                                    (Int128Shuffle) shuffle,
                                    (Int128Mask) m); // specialize
    }

    @Override
    @ForceInline
    public Int128Vector rearrange(VectorShuffle<Integer> s,
                                  Vector<Integer> v) {
        return (Int128Vector)
            super.rearrangeTemplate(Int128Shuffle.class,
                                    (Int128Shuffle) s,
                                    (Int128Vector) v); // specialize
    }

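    // selectFrom is the "gather from another vector" operation: the lanes of
    // this vector are used as indices into v, so the unmasked form behaves
    // like v.rearrange(this.toShuffle()).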
    @Override
    @ForceInline
    public Int128Vector selectFrom(Vector<Integer> v) {
        return (Int128Vector)
            super.selectFromTemplate((Int128Vector) v); // specialize
    }

    @Override
    @ForceInline
    public Int128Vector selectFrom(Vector<Integer> v,
                                   VectorMask<Integer> m) {
        return (Int128Vector)
            super.selectFromTemplate((Int128Vector) v,
                                     (Int128Mask) m); // specialize
    }


    @ForceInline
    @Override
    public int lane(int i) {
        // ... (the excerpt resumes below, inside the Int128Mask inner class)
                this, species,
                (m, s) -> s.maskFactory(m.toArray()).check(s));
        }

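        // The methods below are members of the Int128Mask inner mask class.
        // eq() computes lanewise mask equality as XNOR: a lane is set exactly
        // when the two masks agree, hence the xor with the complement.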
        @Override
        @ForceInline
        public Int128Mask eq(VectorMask<Integer> mask) {
            Objects.requireNonNull(mask);
            Int128Mask m = (Int128Mask)mask;
            return xor(m.not());
        }

        // Unary operations

        @Override
        @ForceInline
        public Int128Mask not() {
            return xor(maskAll(true));
        }

        // Binary operations

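        // and()/or() funnel through VectorSupport.binaryOp so the JIT can
        // replace them with a single vector instruction; the trailing lambda
        // is the scalar fallback used when no intrinsic is available.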
        @Override
        @ForceInline
        public Int128Mask and(VectorMask<Integer> mask) {
            Objects.requireNonNull(mask);
            Int128Mask m = (Int128Mask)mask;
            return VectorSupport.binaryOp(VECTOR_OP_AND, Int128Mask.class, null, int.class, VLENGTH,
                                          this, m, null,
                                          (m1, m2, vm) -> m1.bOp(m2, (i, a, b) -> a & b));
        }

        @Override
        @ForceInline
        public Int128Mask or(VectorMask<Integer> mask) {
            Objects.requireNonNull(mask);
            Int128Mask m = (Int128Mask)mask;
            return VectorSupport.binaryOp(VECTOR_OP_OR, Int128Mask.class, null, int.class, VLENGTH,
                                          this, m, null,
                                          (m1, m2, vm) -> m1.bOp(m2, (i, a, b) -> a | b));
        }

    // ... (rest of the mask and shuffle classes omitted from this excerpt)

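    // Low-level memory kernels of Int128Vector. The public IntVector entry
    // points (fromArray, intoArray, and friends) perform the bounds and mask
    // checks, then dispatch to these *0 methods, which the JIT can intrinsify.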
    @ForceInline
    @Override
    final
    IntVector fromArray0(int[] a, int offset, VectorMask<Integer> m) {
        return super.fromArray0Template(Int128Mask.class, a, offset, (Int128Mask) m); // specialize
    }

    @ForceInline
    @Override
    final
    IntVector fromArray0(int[] a, int offset, int[] indexMap, int mapOffset, VectorMask<Integer> m) {
        return super.fromArray0Template(Int128Mask.class, a, offset, indexMap, mapOffset, (Int128Mask) m);
    }



    @ForceInline
    @Override
    final
    IntVector fromByteArray0(byte[] a, int offset) {
        return super.fromByteArray0Template(a, offset); // specialize
    }

    @ForceInline
    @Override
    final
    IntVector fromByteArray0(byte[] a, int offset, VectorMask<Integer> m) {
        return super.fromByteArray0Template(Int128Mask.class, a, offset, (Int128Mask) m); // specialize
    }

    @ForceInline
    @Override
    final
    IntVector fromByteBuffer0(ByteBuffer bb, int offset) {
        return super.fromByteBuffer0Template(bb, offset); // specialize
    }

    @ForceInline
    @Override
    final
    IntVector fromByteBuffer0(ByteBuffer bb, int offset, VectorMask<Integer> m) {
        return super.fromByteBuffer0Template(Int128Mask.class, bb, offset, (Int128Mask) m); // specialize
    }

    @ForceInline
    @Override
    final
    void intoArray0(int[] a, int offset) {
        super.intoArray0Template(a, offset); // specialize
    }

    @ForceInline
    @Override
    final
    void intoArray0(int[] a, int offset, VectorMask<Integer> m) {
        super.intoArray0Template(Int128Mask.class, a, offset, (Int128Mask) m);
    }

    @ForceInline
    @Override
    final
    void intoArray0(int[] a, int offset, int[] indexMap, int mapOffset, VectorMask<Integer> m) {
        super.intoArray0Template(Int128Mask.class, a, offset, indexMap, mapOffset, (Int128Mask) m);
    }


    @ForceInline
    @Override
    final
    void intoByteArray0(byte[] a, int offset) {
        super.intoByteArray0Template(a, offset); // specialize
    }

    @ForceInline
    @Override
    final
    void intoByteArray0(byte[] a, int offset, VectorMask<Integer> m) {
        super.intoByteArray0Template(Int128Mask.class, a, offset, (Int128Mask) m); // specialize
    }

    @ForceInline
    @Override
    final
    void intoByteBuffer0(ByteBuffer bb, int offset, VectorMask<Integer> m) {
        super.intoByteBuffer0Template(Int128Mask.class, bb, offset, (Int128Mask) m);
    }


    // End of specialized low-level memory operations.

    // ================================================

}
|
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package jdk.incubator.vector;

import java.util.Arrays;
import java.util.Objects;
import java.util.function.IntUnaryOperator;

import jdk.incubator.foreign.MemorySegment;
import jdk.internal.vm.annotation.ForceInline;
import jdk.internal.vm.vector.VectorSupport;

import static jdk.internal.vm.vector.VectorSupport.*;

import static jdk.incubator.vector.VectorOperators.*;

// -- This file was mechanically generated: Do not edit! -- //

@SuppressWarnings("cast") // warning: redundant cast
final class Int128Vector extends IntVector {
    static final IntSpecies VSPECIES =
        (IntSpecies) IntVector.SPECIES_128;

    static final VectorShape VSHAPE =
        VSPECIES.vectorShape();

    static final Class<Int128Vector> VCLASS = Int128Vector.class;

    static final int VSIZE = VSPECIES.vectorBitSize();

    // ...

    @Override
    @ForceInline
    public Int128Vector rearrange(VectorShuffle<Integer> shuffle,
                                  VectorMask<Integer> m) {
        return (Int128Vector)
            super.rearrangeTemplate(Int128Shuffle.class,
                                    Int128Mask.class,
                                    (Int128Shuffle) shuffle,
                                    (Int128Mask) m); // specialize
    }

    @Override
    @ForceInline
    public Int128Vector rearrange(VectorShuffle<Integer> s,
                                  Vector<Integer> v) {
        return (Int128Vector)
            super.rearrangeTemplate(Int128Shuffle.class,
                                    (Int128Shuffle) s,
                                    (Int128Vector) v); // specialize
    }

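    // compress() and expand() are the newer cross-lane operations: compress
    // packs the lanes selected by the mask toward lane 0 (zero-filling the
    // rest), and expand performs the inverse scatter into the selected lanes.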
    @Override
    @ForceInline
    public Int128Vector compress(VectorMask<Integer> m) {
        return (Int128Vector)
            super.compressTemplate(Int128Mask.class,
                                   (Int128Mask) m); // specialize
    }

    @Override
    @ForceInline
    public Int128Vector expand(VectorMask<Integer> m) {
        return (Int128Vector)
            super.expandTemplate(Int128Mask.class,
                                 (Int128Mask) m); // specialize
    }

    @Override
    @ForceInline
    public Int128Vector selectFrom(Vector<Integer> v) {
        return (Int128Vector)
            super.selectFromTemplate((Int128Vector) v); // specialize
    }

    @Override
    @ForceInline
    public Int128Vector selectFrom(Vector<Integer> v,
                                   VectorMask<Integer> m) {
        return (Int128Vector)
            super.selectFromTemplate((Int128Vector) v,
                                     (Int128Mask) m); // specialize
    }


    @ForceInline
    @Override
    public int lane(int i) {
        // ...
                this, species,
                (m, s) -> s.maskFactory(m.toArray()).check(s));
        }

        @Override
        @ForceInline
        public Int128Mask eq(VectorMask<Integer> mask) {
            Objects.requireNonNull(mask);
            Int128Mask m = (Int128Mask)mask;
            return xor(m.not());
        }

        // Unary operations

        @Override
        @ForceInline
        public Int128Mask not() {
            return xor(maskAll(true));
        }

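        // compress() on a mask packs its set lanes toward lane 0, so the
        // result is simply "the first trueCount() lanes set"; the fallback
        // lambda expresses that as iota() < trueCount().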
        @Override
        @ForceInline
        public Int128Mask compress() {
            return (Int128Mask)VectorSupport.comExpOp(VectorSupport.VECTOR_OP_MASK_COMPRESS,
                Int128Vector.class, Int128Mask.class, ETYPE, VLENGTH, null, this,
                (v1, m1) -> VSPECIES.iota().compare(VectorOperators.LT, m1.trueCount()));
        }


        // Binary operations

        @Override
        @ForceInline
        public Int128Mask and(VectorMask<Integer> mask) {
            Objects.requireNonNull(mask);
            Int128Mask m = (Int128Mask)mask;
            return VectorSupport.binaryOp(VECTOR_OP_AND, Int128Mask.class, null, int.class, VLENGTH,
                                          this, m, null,
                                          (m1, m2, vm) -> m1.bOp(m2, (i, a, b) -> a & b));
        }

        @Override
        @ForceInline
        public Int128Mask or(VectorMask<Integer> mask) {
            Objects.requireNonNull(mask);
            Int128Mask m = (Int128Mask)mask;
            return VectorSupport.binaryOp(VECTOR_OP_OR, Int128Mask.class, null, int.class, VLENGTH,
                                          this, m, null,
                                          (m1, m2, vm) -> m1.bOp(m2, (i, a, b) -> a | b));
        }

    // ...

    @ForceInline
    @Override
    final
    IntVector fromArray0(int[] a, int offset, VectorMask<Integer> m) {
        return super.fromArray0Template(Int128Mask.class, a, offset, (Int128Mask) m); // specialize
    }

    @ForceInline
    @Override
    final
    IntVector fromArray0(int[] a, int offset, int[] indexMap, int mapOffset, VectorMask<Integer> m) {
        return super.fromArray0Template(Int128Mask.class, a, offset, indexMap, mapOffset, (Int128Mask) m);
    }


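    // In this newer variant the raw byte-level kernels operate on a
    // MemorySegment (jdk.incubator.foreign) instead of the byte[]/ByteBuffer
    // overloads seen in the older excerpt above.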
    @ForceInline
    @Override
    final
    IntVector fromMemorySegment0(MemorySegment ms, long offset) {
        return super.fromMemorySegment0Template(ms, offset); // specialize
    }

    @ForceInline
    @Override
    final
    IntVector fromMemorySegment0(MemorySegment ms, long offset, VectorMask<Integer> m) {
        return super.fromMemorySegment0Template(Int128Mask.class, ms, offset, (Int128Mask) m); // specialize
    }

    @ForceInline
    @Override
    final
    void intoArray0(int[] a, int offset) {
        super.intoArray0Template(a, offset); // specialize
    }

    @ForceInline
    @Override
    final
    void intoArray0(int[] a, int offset, VectorMask<Integer> m) {
        super.intoArray0Template(Int128Mask.class, a, offset, (Int128Mask) m);
    }

    @ForceInline
    @Override
    final
    void intoArray0(int[] a, int offset, int[] indexMap, int mapOffset, VectorMask<Integer> m) {
        super.intoArray0Template(Int128Mask.class, a, offset, indexMap, mapOffset, (Int128Mask) m);
    }


    @ForceInline
    @Override
    final
    void intoMemorySegment0(MemorySegment ms, long offset, VectorMask<Integer> m) {
        super.intoMemorySegment0Template(Int128Mask.class, ms, offset, (Int128Mask) m);
    }


    // End of specialized low-level memory operations.

    // ================================================

}
|