
src/jdk.incubator.vector/share/classes/jdk/incubator/vector/Int64Vector.java (original version)


  7  * published by the Free Software Foundation.  Oracle designates this
  8  * particular file as subject to the "Classpath" exception as provided
  9  * by Oracle in the LICENSE file that accompanied this code.
 10  *
 11  * This code is distributed in the hope that it will be useful, but WITHOUT
 12  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 13  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 14  * version 2 for more details (a copy is included in the LICENSE file that
 15  * accompanied this code).
 16  *
 17  * You should have received a copy of the GNU General Public License version
 18  * 2 along with this work; if not, write to the Free Software Foundation,
 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  */
 25 package jdk.incubator.vector;
 26 
 27 import java.nio.ByteBuffer;
 28 import java.util.Arrays;
 29 import java.util.Objects;
 30 import java.util.function.IntUnaryOperator;
 31 

 32 import jdk.internal.vm.annotation.ForceInline;
 33 import jdk.internal.vm.vector.VectorSupport;
 34 
 35 import static jdk.internal.vm.vector.VectorSupport.*;
 36 
 37 import static jdk.incubator.vector.VectorOperators.*;
 38 
 39 // -- This file was mechanically generated: Do not edit! -- //
 40 
 41 @SuppressWarnings("cast")  // warning: redundant cast
 42 final class Int64Vector extends IntVector {
 43     static final IntSpecies VSPECIES =
 44         (IntSpecies) IntVector.SPECIES_64;
 45 
 46     static final VectorShape VSHAPE =
 47         VSPECIES.vectorShape();
 48 
 49     static final Class<Int64Vector> VCLASS = Int64Vector.class;
 50 
 51     static final int VSIZE = VSPECIES.vectorBitSize();

457     @ForceInline
458     public Int64Vector rearrange(VectorShuffle<Integer> shuffle,
459                                   VectorMask<Integer> m) {
460         return (Int64Vector)
461             super.rearrangeTemplate(Int64Shuffle.class,
462                                     Int64Mask.class,
463                                     (Int64Shuffle) shuffle,
464                                     (Int64Mask) m);  // specialize
465     }
466 
467     @Override
468     @ForceInline
469     public Int64Vector rearrange(VectorShuffle<Integer> s,
470                                   Vector<Integer> v) {
471         return (Int64Vector)
472             super.rearrangeTemplate(Int64Shuffle.class,
473                                     (Int64Shuffle) s,
474                                     (Int64Vector) v);  // specialize
475     }
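For reference, a minimal usage sketch of these rearrange specializations through the public IntVector API (illustrative values; the 64-bit int species holds two lanes):

    // assumes: import java.util.Arrays; import jdk.incubator.vector.*;
    // run with --add-modules jdk.incubator.vector
    VectorSpecies<Integer> sp = IntVector.SPECIES_64;
    IntVector v = IntVector.fromArray(sp, new int[] {10, 20}, 0);

    VectorShuffle<Integer> swap = VectorShuffle.fromValues(sp, 1, 0);      // lane 0 <- 1, lane 1 <- 0
    System.out.println(Arrays.toString(v.rearrange(swap).toArray()));      // [20, 10]

    VectorMask<Integer> m = VectorMask.fromValues(sp, true, false);
    System.out.println(Arrays.toString(v.rearrange(swap, m).toArray()));   // [20, 0]: unset lanes are zeroed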
476 
477     @Override
478     @ForceInline
479     public Int64Vector selectFrom(Vector<Integer> v) {
480         return (Int64Vector)
481             super.selectFromTemplate((Int64Vector) v);  // specialize
482     }
483 
484     @Override
485     @ForceInline
486     public Int64Vector selectFrom(Vector<Integer> v,
487                                    VectorMask<Integer> m) {
488         return (Int64Vector)
489             super.selectFromTemplate((Int64Vector) v,
490                                      (Int64Mask) m);  // specialize
491     }
492 
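A companion sketch for selectFrom, which uses this vector's lane values as indexes into the argument vector (illustrative, via the public API):

    // assumes: import jdk.incubator.vector.*; run with --add-modules jdk.incubator.vector
    VectorSpecies<Integer> sp = IntVector.SPECIES_64;
    IntVector values  = IntVector.fromArray(sp, new int[] {100, 200}, 0);
    IntVector indexes = IntVector.fromArray(sp, new int[] {1, 1}, 0);

    // Each lane of 'indexes' selects a lane of 'values': result lanes are [200, 200].
    int[] picked = indexes.selectFrom(values).toArray();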
493 
494     @ForceInline
495     @Override
496     public int lane(int i) {

632                 this, species,
633                 (m, s) -> s.maskFactory(m.toArray()).check(s));
634         }
635 
636         @Override
637         @ForceInline
638         public Int64Mask eq(VectorMask<Integer> mask) {
639             Objects.requireNonNull(mask);
640             Int64Mask m = (Int64Mask)mask;
641             return xor(m.not());
642         }
643 
644         // Unary operations
645 
646         @Override
647         @ForceInline
648         public Int64Mask not() {
649             return xor(maskAll(true));
650         }
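A short sketch of the surrounding mask operations through the public VectorMask API (illustrative two-lane values):

    // assumes: import jdk.incubator.vector.*; run with --add-modules jdk.incubator.vector
    VectorSpecies<Integer> sp = IntVector.SPECIES_64;
    VectorMask<Integer> a = VectorMask.fromValues(sp, true, false);
    VectorMask<Integer> b = VectorMask.fromValues(sp, true, true);

    boolean[] notA  = a.not().toArray();   // [false, true]
    boolean[] aAndB = a.and(b).toArray();  // [true, false]
    boolean[] aOrB  = a.or(b).toArray();   // [true, true]
    boolean[] aEqB  = a.eq(b).toArray();   // [true, false]: lane-wise equality (implemented above as xor of the complement)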
651 
652         // Binary operations
653 
654         @Override
655         @ForceInline
656         public Int64Mask and(VectorMask<Integer> mask) {
657             Objects.requireNonNull(mask);
658             Int64Mask m = (Int64Mask)mask;
659             return VectorSupport.binaryOp(VECTOR_OP_AND, Int64Mask.class, null, int.class, VLENGTH,
660                                           this, m, null,
661                                           (m1, m2, vm) -> m1.bOp(m2, (i, a, b) -> a & b));
662         }
663 
664         @Override
665         @ForceInline
666         public Int64Mask or(VectorMask<Integer> mask) {
667             Objects.requireNonNull(mask);
668             Int64Mask m = (Int64Mask)mask;
669             return VectorSupport.binaryOp(VECTOR_OP_OR, Int64Mask.class, null, int.class, VLENGTH,
670                                           this, m, null,
671                                           (m1, m2, vm) -> m1.bOp(m2, (i, a, b) -> a | b));

824 
825     @ForceInline
826     @Override
827     final
828     IntVector fromArray0(int[] a, int offset, VectorMask<Integer> m) {
829         return super.fromArray0Template(Int64Mask.class, a, offset, (Int64Mask) m);  // specialize
830     }
831 
832     @ForceInline
833     @Override
834     final
835     IntVector fromArray0(int[] a, int offset, int[] indexMap, int mapOffset, VectorMask<Integer> m) {
836         return super.fromArray0Template(Int64Mask.class, a, offset, indexMap, mapOffset, (Int64Mask) m);
837     }
838 
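The indexMap overload above backs the public gather form of fromArray; a minimal sketch with illustrative values:

    // assumes: import jdk.incubator.vector.*; run with --add-modules jdk.incubator.vector
    int[] data     = {5, 6, 7, 8, 9};
    int[] indexMap = {4, 2};   // one index per lane
    IntVector gathered = IntVector.fromArray(IntVector.SPECIES_64, data, 0, indexMap, 0);
    // gathered lanes: [data[4], data[2]] == [9, 7]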
839 
840 
841     @ForceInline
842     @Override
843     final
844     IntVector fromByteArray0(byte[] a, int offset) {
845         return super.fromByteArray0Template(a, offset);  // specialize
846     }
847 
848     @ForceInline
849     @Override
850     final
851     IntVector fromByteArray0(byte[] a, int offset, VectorMask<Integer> m) {
852         return super.fromByteArray0Template(Int64Mask.class, a, offset, (Int64Mask) m);  // specialize
853     }
854 
855     @ForceInline
856     @Override
857     final
858     IntVector fromByteBuffer0(ByteBuffer bb, int offset) {
859         return super.fromByteBuffer0Template(bb, offset);  // specialize
860     }
861 
862     @ForceInline
863     @Override
864     final
865     IntVector fromByteBuffer0(ByteBuffer bb, int offset, VectorMask<Integer> m) {
866         return super.fromByteBuffer0Template(Int64Mask.class, bb, offset, (Int64Mask) m);  // specialize
867     }
868 
869     @ForceInline
870     @Override
871     final
872     void intoArray0(int[] a, int offset) {
873         super.intoArray0Template(a, offset);  // specialize
874     }
875 
876     @ForceInline
877     @Override
878     final
879     void intoArray0(int[] a, int offset, VectorMask<Integer> m) {
880         super.intoArray0Template(Int64Mask.class, a, offset, (Int64Mask) m);
881     }
882 
883     @ForceInline
884     @Override
885     final
886     void intoArray0(int[] a, int offset, int[] indexMap, int mapOffset, VectorMask<Integer> m) {
887         super.intoArray0Template(Int64Mask.class, a, offset, indexMap, mapOffset, (Int64Mask) m);
888     }
889 
890 
891     @ForceInline
892     @Override
893     final
894     void intoByteArray0(byte[] a, int offset) {
895         super.intoByteArray0Template(a, offset);  // specialize
896     }
897 
898     @ForceInline
899     @Override
900     final
901     void intoByteArray0(byte[] a, int offset, VectorMask<Integer> m) {
902         super.intoByteArray0Template(Int64Mask.class, a, offset, (Int64Mask) m);  // specialize
903     }
904 
905     @ForceInline
906     @Override
907     final
908     void intoByteBuffer0(ByteBuffer bb, int offset, VectorMask<Integer> m) {
909         super.intoByteBuffer0Template(Int64Mask.class, bb, offset, (Int64Mask) m);
910     }
911 
912 
913     // End of specialized low-level memory operations.
914 
915     // ================================================
916 
917 }
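The fromByteArray0/fromByteBuffer0 and intoByteArray0/intoByteBuffer0 specializations above sit behind the byte-oriented public loads and stores of this revision of the API; a minimal sketch, assuming the matching public entry points fromByteBuffer and intoByteBuffer:

    // assumes: import java.nio.*; import jdk.incubator.vector.*;
    // run with --add-modules jdk.incubator.vector
    ByteBuffer bb = ByteBuffer.allocate(8).order(ByteOrder.nativeOrder());
    bb.putInt(0, 11).putInt(4, 22);

    // Load two int lanes, double them, and store them back into the same buffer.
    IntVector v = IntVector.fromByteBuffer(IntVector.SPECIES_64, bb, 0, ByteOrder.nativeOrder());
    v.mul(2).intoByteBuffer(bb, 0, ByteOrder.nativeOrder());   // buffer now holds 22, 44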

src/jdk.incubator.vector/share/classes/jdk/incubator/vector/Int64Vector.java (updated version)

  7  * published by the Free Software Foundation.  Oracle designates this
  8  * particular file as subject to the "Classpath" exception as provided
  9  * by Oracle in the LICENSE file that accompanied this code.
 10  *
 11  * This code is distributed in the hope that it will be useful, but WITHOUT
 12  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 13  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 14  * version 2 for more details (a copy is included in the LICENSE file that
 15  * accompanied this code).
 16  *
 17  * You should have received a copy of the GNU General Public License version
 18  * 2 along with this work; if not, write to the Free Software Foundation,
 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  */
 25 package jdk.incubator.vector;
 26 

 27 import java.util.Arrays;
 28 import java.util.Objects;
 29 import java.util.function.IntUnaryOperator;
 30 
 31 import jdk.incubator.foreign.MemorySegment;
 32 import jdk.internal.vm.annotation.ForceInline;
 33 import jdk.internal.vm.vector.VectorSupport;
 34 
 35 import static jdk.internal.vm.vector.VectorSupport.*;
 36 
 37 import static jdk.incubator.vector.VectorOperators.*;
 38 
 39 // -- This file was mechanically generated: Do not edit! -- //
 40 
 41 @SuppressWarnings("cast")  // warning: redundant cast
 42 final class Int64Vector extends IntVector {
 43     static final IntSpecies VSPECIES =
 44         (IntSpecies) IntVector.SPECIES_64;
 45 
 46     static final VectorShape VSHAPE =
 47         VSPECIES.vectorShape();
 48 
 49     static final Class<Int64Vector> VCLASS = Int64Vector.class;
 50 
 51     static final int VSIZE = VSPECIES.vectorBitSize();

457     @ForceInline
458     public Int64Vector rearrange(VectorShuffle<Integer> shuffle,
459                                   VectorMask<Integer> m) {
460         return (Int64Vector)
461             super.rearrangeTemplate(Int64Shuffle.class,
462                                     Int64Mask.class,
463                                     (Int64Shuffle) shuffle,
464                                     (Int64Mask) m);  // specialize
465     }
466 
467     @Override
468     @ForceInline
469     public Int64Vector rearrange(VectorShuffle<Integer> s,
470                                   Vector<Integer> v) {
471         return (Int64Vector)
472             super.rearrangeTemplate(Int64Shuffle.class,
473                                     (Int64Shuffle) s,
474                                     (Int64Vector) v);  // specialize
475     }
476 
477     @Override
478     @ForceInline
479     public Int64Vector compress(VectorMask<Integer> m) {
480         return (Int64Vector)
481             super.compressTemplate(Int64Mask.class,
482                                    (Int64Mask) m);  // specialize
483     }
484 
485     @Override
486     @ForceInline
487     public Int64Vector expand(VectorMask<Integer> m) {
488         return (Int64Vector)
489             super.expandTemplate(Int64Mask.class,
490                                    (Int64Mask) m);  // specialize
491     }
492 
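A minimal usage sketch of the compress and expand specializations above, through the public API (illustrative two-lane values):

    // assumes: import jdk.incubator.vector.*; run with --add-modules jdk.incubator.vector
    VectorSpecies<Integer> sp = IntVector.SPECIES_64;
    IntVector v = IntVector.fromArray(sp, new int[] {7, 8}, 0);
    VectorMask<Integer> m = VectorMask.fromValues(sp, false, true);

    int[] packed = v.compress(m).toArray();   // [8, 0]: set lanes packed toward lane 0, rest zeroed
    int[] spread = v.expand(m).toArray();     // [0, 7]: low lanes of v scattered to the set positions
    boolean[] mc  = m.compress().toArray();   // [true, false]: the mask form, cf. Int64Mask.compress() below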
493     @Override
494     @ForceInline
495     public Int64Vector selectFrom(Vector<Integer> v) {
496         return (Int64Vector)
497             super.selectFromTemplate((Int64Vector) v);  // specialize
498     }
499 
500     @Override
501     @ForceInline
502     public Int64Vector selectFrom(Vector<Integer> v,
503                                    VectorMask<Integer> m) {
504         return (Int64Vector)
505             super.selectFromTemplate((Int64Vector) v,
506                                      (Int64Mask) m);  // specialize
507     }
508 
509 
510     @ForceInline
511     @Override
512     public int lane(int i) {

648                 this, species,
649                 (m, s) -> s.maskFactory(m.toArray()).check(s));
650         }
651 
652         @Override
653         @ForceInline
654         public Int64Mask eq(VectorMask<Integer> mask) {
655             Objects.requireNonNull(mask);
656             Int64Mask m = (Int64Mask)mask;
657             return xor(m.not());
658         }
659 
660         // Unary operations
661 
662         @Override
663         @ForceInline
664         public Int64Mask not() {
665             return xor(maskAll(true));
666         }
667 
668         @Override
669         @ForceInline
670         public Int64Mask compress() {
671             return (Int64Mask)VectorSupport.comExpOp(VectorSupport.VECTOR_OP_MASK_COMPRESS,
672                 Int64Vector.class, Int64Mask.class, ETYPE, VLENGTH, null, this,
673                 (v1, m1) -> VSPECIES.iota().compare(VectorOperators.LT, m1.trueCount()));
674         }
675 
676 
677         // Binary operations
678 
679         @Override
680         @ForceInline
681         public Int64Mask and(VectorMask<Integer> mask) {
682             Objects.requireNonNull(mask);
683             Int64Mask m = (Int64Mask)mask;
684             return VectorSupport.binaryOp(VECTOR_OP_AND, Int64Mask.class, null, int.class, VLENGTH,
685                                           this, m, null,
686                                           (m1, m2, vm) -> m1.bOp(m2, (i, a, b) -> a & b));
687         }
688 
689         @Override
690         @ForceInline
691         public Int64Mask or(VectorMask<Integer> mask) {
692             Objects.requireNonNull(mask);
693             Int64Mask m = (Int64Mask)mask;
694             return VectorSupport.binaryOp(VECTOR_OP_OR, Int64Mask.class, null, int.class, VLENGTH,
695                                           this, m, null,
696                                           (m1, m2, vm) -> m1.bOp(m2, (i, a, b) -> a | b));

849 
850     @ForceInline
851     @Override
852     final
853     IntVector fromArray0(int[] a, int offset, VectorMask<Integer> m) {
854         return super.fromArray0Template(Int64Mask.class, a, offset, (Int64Mask) m);  // specialize
855     }
856 
857     @ForceInline
858     @Override
859     final
860     IntVector fromArray0(int[] a, int offset, int[] indexMap, int mapOffset, VectorMask<Integer> m) {
861         return super.fromArray0Template(Int64Mask.class, a, offset, indexMap, mapOffset, (Int64Mask) m);
862     }
863 
864 
865 
866     @ForceInline
867     @Override
868     final
869     IntVector fromMemorySegment0(MemorySegment ms, long offset) {
870         return super.fromMemorySegment0Template(ms, offset);  // specialize
871     }
872 
873     @ForceInline
874     @Override
875     final
876     IntVector fromMemorySegment0(MemorySegment ms, long offset, VectorMask<Integer> m) {
877         return super.fromMemorySegment0Template(Int64Mask.class, ms, offset, (Int64Mask) m);  // specialize
878     }
879 
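These specializations back the segment-based public loads of this revision; a minimal sketch, assuming the matching public entry point IntVector.fromMemorySegment and the jdk.incubator.foreign MemorySegment imported by this file:

    // assumes: import java.nio.ByteOrder; import jdk.incubator.foreign.MemorySegment;
    //          import jdk.incubator.vector.*;
    // run with --add-modules jdk.incubator.vector,jdk.incubator.foreign
    MemorySegment ms = MemorySegment.ofArray(new int[] {3, 4});

    // Load both int lanes from the segment (the masked overload additionally takes a VectorMask).
    IntVector v = IntVector.fromMemorySegment(IntVector.SPECIES_64, ms, 0L, ByteOrder.nativeOrder());
    int[] lanes = v.toArray();   // [3, 4]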
880     @ForceInline
881     @Override
882     final
883     void intoArray0(int[] a, int offset) {
884         super.intoArray0Template(a, offset);  // specialize
885     }
886 
887     @ForceInline
888     @Override
889     final
890     void intoArray0(int[] a, int offset, VectorMask<Integer> m) {
891         super.intoArray0Template(Int64Mask.class, a, offset, (Int64Mask) m);
892     }
893 
894     @ForceInline
895     @Override
896     final
897     void intoArray0(int[] a, int offset, int[] indexMap, int mapOffset, VectorMask<Integer> m) {
898         super.intoArray0Template(Int64Mask.class, a, offset, indexMap, mapOffset, (Int64Mask) m);
899     }
900 
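The masked fromArray0/intoArray0 specializations above are what the usual tail-handling loop reaches through the public API; a minimal sketch:

    // assumes: import jdk.incubator.vector.*; run with --add-modules jdk.incubator.vector
    VectorSpecies<Integer> sp = IntVector.SPECIES_64;
    int[] src = {1, 2, 3};             // length is not a multiple of the lane count
    int[] dst = new int[src.length];

    for (int i = 0; i < src.length; i += sp.length()) {
        VectorMask<Integer> m = sp.indexInRange(i, src.length);  // masks off out-of-range lanes
        IntVector v = IntVector.fromArray(sp, src, i, m);        // masked load
        v.add(10).intoArray(dst, i, m);                          // masked store; dst becomes {11, 12, 13}
    }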
901 
902     @ForceInline
903     @Override
904     final
905     void intoMemorySegment0(MemorySegment ms, long offset, VectorMask<Integer> m) {
906         super.intoMemorySegment0Template(Int64Mask.class, ms, offset, (Int64Mask) m);














907     }
908 
909 
910     // End of specialized low-level memory operations.
911 
912     // ================================================
913 
914 }