class StartNode;
class State;
class StoreNode;
class SubNode;
class SubTypeCheckNode;
class Type;
class TypeNode;
class UnlockNode;
class VectorNode;
class LoadVectorNode;
class LoadVectorMaskedNode;
class StoreVectorMaskedNode;
class LoadVectorGatherNode;
class StoreVectorNode;
class StoreVectorScatterNode;
class VectorMaskCmpNode;
class VectorUnboxNode;
class VectorSet;
class VectorReinterpretNode;
class ShiftVNode;


#ifndef OPTO_DU_ITERATOR_ASSERT
#ifdef ASSERT
#define OPTO_DU_ITERATOR_ASSERT 1
#else
#define OPTO_DU_ITERATOR_ASSERT 0
#endif
#endif //OPTO_DU_ITERATOR_ASSERT

#if OPTO_DU_ITERATOR_ASSERT
class DUIterator;
class DUIterator_Fast;
class DUIterator_Last;
#else
typedef uint DUIterator;
typedef Node** DUIterator_Fast;
typedef Node** DUIterator_Last;
#endif
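
// Illustrative sketch (not part of the original header): the conventional C2
// idiom for walking def-use edges with the fast iterator. Under
// OPTO_DU_ITERATOR_ASSERT the DUIterator_Fast class checks that the out-array
// is not modified mid-walk; in product builds it degrades to the raw Node**
// typedef above with no overhead. fast_outs()/fast_out() are the Node
// accessors that pair with it.
//
//   for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
//     Node* use = n->fast_out(i);  // each user of n; no edge deletions here
//   }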

// ... (lines elided) ...

      DEFINE_CLASS_ID(ConstraintCast, Type, 1)
        DEFINE_CLASS_ID(CastII, ConstraintCast, 0)
        DEFINE_CLASS_ID(CheckCastPP, ConstraintCast, 1)
        DEFINE_CLASS_ID(CastLL, ConstraintCast, 2)
        DEFINE_CLASS_ID(CastFF, ConstraintCast, 3)
        DEFINE_CLASS_ID(CastDD, ConstraintCast, 4)
        DEFINE_CLASS_ID(CastVV, ConstraintCast, 5)
      DEFINE_CLASS_ID(CMove, Type, 3)
      DEFINE_CLASS_ID(SafePointScalarObject, Type, 4)
      DEFINE_CLASS_ID(DecodeNarrowPtr, Type, 5)
        DEFINE_CLASS_ID(DecodeN, DecodeNarrowPtr, 0)
        DEFINE_CLASS_ID(DecodeNKlass, DecodeNarrowPtr, 1)
      DEFINE_CLASS_ID(EncodeNarrowPtr, Type, 6)
        DEFINE_CLASS_ID(EncodeP, EncodeNarrowPtr, 0)
        DEFINE_CLASS_ID(EncodePKlass, EncodeNarrowPtr, 1)
      DEFINE_CLASS_ID(Vector, Type, 7)
        DEFINE_CLASS_ID(VectorMaskCmp, Vector, 0)
        DEFINE_CLASS_ID(VectorUnbox, Vector, 1)
        DEFINE_CLASS_ID(VectorReinterpret, Vector, 2)
        DEFINE_CLASS_ID(ShiftV, Vector, 3)

    DEFINE_CLASS_ID(Proj, Node, 3)
      DEFINE_CLASS_ID(CatchProj, Proj, 0)
      DEFINE_CLASS_ID(JumpProj, Proj, 1)
      DEFINE_CLASS_ID(IfProj, Proj, 2)
        DEFINE_CLASS_ID(IfTrue, IfProj, 0)
        DEFINE_CLASS_ID(IfFalse, IfProj, 1)
      DEFINE_CLASS_ID(Parm, Proj, 4)
      DEFINE_CLASS_ID(MachProj, Proj, 5)

    DEFINE_CLASS_ID(Mem, Node, 4)
      DEFINE_CLASS_ID(Load, Mem, 0)
        DEFINE_CLASS_ID(LoadVector, Load, 0)
          DEFINE_CLASS_ID(LoadVectorGather, LoadVector, 0)
          DEFINE_CLASS_ID(LoadVectorMasked, LoadVector, 1)
      DEFINE_CLASS_ID(Store, Mem, 1)
        DEFINE_CLASS_ID(StoreVector, Store, 0)
          DEFINE_CLASS_ID(StoreVectorScatter, StoreVector, 0)
          DEFINE_CLASS_ID(StoreVectorMasked, StoreVector, 1)
      DEFINE_CLASS_ID(LoadStore, Mem, 2)

    // ... (lines elided) ...

    _max_classes = ClassMask_Move
  };
#undef DEFINE_CLASS_ID
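
// Illustrative note (not from the original header): DEFINE_CLASS_ID packs the
// superclass id into the low bits of each subclass id, so every subtype test
// is a single mask-and-compare on _class_id, with no virtual call. For
// example, a node whose class id is Class_LoadVectorGather answers true to
// each level of its hierarchy:
//
//   n->is_Mem();               // (_class_id & ClassMask_Mem)  == Class_Mem
//   n->is_Load();              // (_class_id & ClassMask_Load) == Class_Load
//   n->is_LoadVector();
//   n->is_LoadVectorGather();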

  // Flags are sorted by usage frequency.
  enum NodeFlags {
    Flag_is_Copy                     = 1 << 0, // should be first bit to avoid shift
    Flag_rematerialize               = 1 << 1,
    Flag_needs_anti_dependence_check = 1 << 2,
    Flag_is_macro                    = 1 << 3,
    Flag_is_Con                      = 1 << 4,
    Flag_is_cisc_alternate           = 1 << 5,
    Flag_is_dead_loop_safe           = 1 << 6,
    Flag_may_be_short_branch         = 1 << 7,
    Flag_avoid_back_to_back_before   = 1 << 8,
    Flag_avoid_back_to_back_after    = 1 << 9,
    Flag_has_call                    = 1 << 10,
    Flag_is_reduction                = 1 << 11,
    Flag_is_scheduled                = 1 << 12,
    Flag_is_expensive                = 1 << 13,
    Flag_is_predicated_vector        = 1 << 14,
    Flag_for_post_loop_opts_igvn     = 1 << 15,
    Flag_is_removed_by_peephole      = 1 << 16,
    _last_flag                       = Flag_is_removed_by_peephole
  };
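
// Illustrative sketch (not from the original header): flags are set once in a
// node's constructor via init_flags() below, and read through the boolean
// accessors later in this class. For example, a node that the macro-expansion
// pass must rewrite would be constructed and queried roughly like this:
//
//   init_flags(Flag_is_macro);    // in the node's constructor
//   ...
//   if (n->is_macro()) { ... }    // tests (_flags & Flag_is_macro) != 0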

  class PD;

private:
  juint _class_id;
  juint _flags;

  static juint max_flags();

protected:
  // These methods should be called from constructors only.
  void init_class_id(juint c) {
    _class_id = c; // cast out const
  }
  void init_flags(uint fl) {
    assert(fl <= max_flags(), "invalid node flag");
    _flags |= fl;
  }
  void clear_flag(uint fl) {

  // ... (lines elided) ...

  DEFINE_CLASS_QUERY(MultiBranch)
  DEFINE_CLASS_QUERY(Opaque1)
  DEFINE_CLASS_QUERY(OuterStripMinedLoop)
  DEFINE_CLASS_QUERY(OuterStripMinedLoopEnd)
  DEFINE_CLASS_QUERY(Parm)
  DEFINE_CLASS_QUERY(PCTable)
  DEFINE_CLASS_QUERY(Phi)
  DEFINE_CLASS_QUERY(Proj)
  DEFINE_CLASS_QUERY(Region)
  DEFINE_CLASS_QUERY(Root)
  DEFINE_CLASS_QUERY(SafePoint)
  DEFINE_CLASS_QUERY(SafePointScalarObject)
  DEFINE_CLASS_QUERY(Start)
  DEFINE_CLASS_QUERY(Store)
  DEFINE_CLASS_QUERY(Sub)
  DEFINE_CLASS_QUERY(SubTypeCheck)
  DEFINE_CLASS_QUERY(Type)
  DEFINE_CLASS_QUERY(Vector)
  DEFINE_CLASS_QUERY(VectorMaskCmp)
  DEFINE_CLASS_QUERY(VectorUnbox)
  DEFINE_CLASS_QUERY(VectorReinterpret)
  DEFINE_CLASS_QUERY(LoadVector)
  DEFINE_CLASS_QUERY(LoadVectorGather)
  DEFINE_CLASS_QUERY(StoreVector)
  DEFINE_CLASS_QUERY(StoreVectorScatter)
  DEFINE_CLASS_QUERY(ShiftV)
  DEFINE_CLASS_QUERY(Unlock)

#undef DEFINE_CLASS_QUERY
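
// For reference (an assumed expansion, consistent with is_SpillCopy() just
// below): each DEFINE_CLASS_QUERY(X) above generates at least a predicate of
// the form
//
//   bool is_X() const { return (_class_id & ClassMask_X) == Class_X; }
//
// together with the matching checked cast as_X() used throughout C2.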

  // duplicate of is_MachSpillCopy()
  bool is_SpillCopy () const {
    return ((_class_id & ClassMask_MachSpillCopy) == Class_MachSpillCopy);
  }

  bool is_Con () const { return (_flags & Flag_is_Con) != 0; }
  // A data node which is safe to leave in a dead loop during IGVN optimization.
  bool is_dead_loop_safe() const;

  // is_Copy() returns the copied edge index (0 or 1)
  uint is_Copy() const { return (_flags & Flag_is_Copy); }
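
// Illustrative use (not from the original header): since is_Copy() returns an
// edge index rather than a bool, callers can fetch the forwarded value
// directly; 0 means "not a copy".
//
//   uint cidx = n->is_Copy();
//   if (cidx != 0) {
//     Node* def = n->in(cidx);   // the input this copy passes through
//   }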

  // ... (lines elided) ...

      return this == (const Node*)in(0);
    else
      return is_Start();
  }

  // The Ideal control projection Nodes are IfTrue/IfFalse, JumpProjNode, Root,
  // Goto and Return. This call also returns the block ending Node.
  virtual const Node *is_block_proj() const;

  // The node is a "macro" node which needs to be expanded before matching
  bool is_macro() const { return (_flags & Flag_is_macro) != 0; }
  // The node is expensive: the best control is set during loop opts
  bool is_expensive() const { return (_flags & Flag_is_expensive) != 0 && in(0) != NULL; }

  // An arithmetic node which accumulates data in a loop.
  // It must have the loop's phi as an input and provide a def to the phi.
  bool is_reduction() const { return (_flags & Flag_is_reduction) != 0; }
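
// Illustrative IR shape of such a reduction (a sketch; the names are made up):
// a loop summing values cycles its accumulator through the loop phi, which is
// exactly the pattern the comment above describes:
//
//   sum_phi = Phi(loop_head, init, sum_add)   // loop-carried accumulator
//   sum_add = AddI(sum_phi, x)                // marked with Flag_is_reduction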

  bool is_predicated_vector() const { return (_flags & Flag_is_predicated_vector) != 0; }

  // Used in lcm to mark nodes that have been scheduled
  bool is_scheduled() const { return (_flags & Flag_is_scheduled) != 0; }

  bool for_post_loop_opts_igvn() const { return (_flags & Flag_for_post_loop_opts_igvn) != 0; }

  //----------------- Optimization

  // Get the worst-case Type output for this Node.
  virtual const class Type *bottom_type() const;

  // If we find a better type for a node, try to record it permanently.
  // Return true if this node actually changed.
  // Be sure to do the hash_delete game in the "rehash" variant.
  void raise_bottom_type(const Type* new_type);

  // Get the address type with which this node uses and/or defs memory,
  // or NULL if none. The address type is conservatively wide.
  // Returns non-null for calls, membars, loads, stores, etc.
  // Returns TypePtr::BOTTOM if the node touches memory "broadly".
  virtual const class TypePtr *adr_type() const { return NULL; }

//=============================================================================
// The same regions of the header are repeated below in a variant with
// additional vector support: the CompressV/ExpandV/CompressM node classes,
// the Flag_has_vector_mask_set and Flag_is_predicated_using_blend node flags,
// and their associated class queries and accessors.
//=============================================================================

class StartNode;
class State;
class StoreNode;
class SubNode;
class SubTypeCheckNode;
class Type;
class TypeNode;
class UnlockNode;
class VectorNode;
class LoadVectorNode;
class LoadVectorMaskedNode;
class StoreVectorMaskedNode;
class LoadVectorGatherNode;
class StoreVectorNode;
class StoreVectorScatterNode;
class VectorMaskCmpNode;
class VectorUnboxNode;
class VectorSet;
class VectorReinterpretNode;
class ShiftVNode;
class ExpandVNode;
class CompressVNode;
class CompressMNode;


#ifndef OPTO_DU_ITERATOR_ASSERT
#ifdef ASSERT
#define OPTO_DU_ITERATOR_ASSERT 1
#else
#define OPTO_DU_ITERATOR_ASSERT 0
#endif
#endif //OPTO_DU_ITERATOR_ASSERT

#if OPTO_DU_ITERATOR_ASSERT
class DUIterator;
class DUIterator_Fast;
class DUIterator_Last;
#else
typedef uint DUIterator;
typedef Node** DUIterator_Fast;
typedef Node** DUIterator_Last;
#endif

// ... (lines elided) ...

      DEFINE_CLASS_ID(ConstraintCast, Type, 1)
        DEFINE_CLASS_ID(CastII, ConstraintCast, 0)
        DEFINE_CLASS_ID(CheckCastPP, ConstraintCast, 1)
        DEFINE_CLASS_ID(CastLL, ConstraintCast, 2)
        DEFINE_CLASS_ID(CastFF, ConstraintCast, 3)
        DEFINE_CLASS_ID(CastDD, ConstraintCast, 4)
        DEFINE_CLASS_ID(CastVV, ConstraintCast, 5)
      DEFINE_CLASS_ID(CMove, Type, 3)
      DEFINE_CLASS_ID(SafePointScalarObject, Type, 4)
      DEFINE_CLASS_ID(DecodeNarrowPtr, Type, 5)
        DEFINE_CLASS_ID(DecodeN, DecodeNarrowPtr, 0)
        DEFINE_CLASS_ID(DecodeNKlass, DecodeNarrowPtr, 1)
      DEFINE_CLASS_ID(EncodeNarrowPtr, Type, 6)
        DEFINE_CLASS_ID(EncodeP, EncodeNarrowPtr, 0)
        DEFINE_CLASS_ID(EncodePKlass, EncodeNarrowPtr, 1)
      DEFINE_CLASS_ID(Vector, Type, 7)
        DEFINE_CLASS_ID(VectorMaskCmp, Vector, 0)
        DEFINE_CLASS_ID(VectorUnbox, Vector, 1)
        DEFINE_CLASS_ID(VectorReinterpret, Vector, 2)
        DEFINE_CLASS_ID(ShiftV, Vector, 3)
        DEFINE_CLASS_ID(CompressV, Vector, 4)
        DEFINE_CLASS_ID(ExpandV, Vector, 5)
        DEFINE_CLASS_ID(CompressM, Vector, 6)
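        // Descriptive note (not an original comment): CompressV/ExpandV pack
        // and unpack vector lanes selected by a mask, and CompressM compresses
        // a mask itself; they back the Vector API compress/expand operations.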

    DEFINE_CLASS_ID(Proj, Node, 3)
      DEFINE_CLASS_ID(CatchProj, Proj, 0)
      DEFINE_CLASS_ID(JumpProj, Proj, 1)
      DEFINE_CLASS_ID(IfProj, Proj, 2)
        DEFINE_CLASS_ID(IfTrue, IfProj, 0)
        DEFINE_CLASS_ID(IfFalse, IfProj, 1)
      DEFINE_CLASS_ID(Parm, Proj, 4)
      DEFINE_CLASS_ID(MachProj, Proj, 5)

    DEFINE_CLASS_ID(Mem, Node, 4)
      DEFINE_CLASS_ID(Load, Mem, 0)
        DEFINE_CLASS_ID(LoadVector, Load, 0)
          DEFINE_CLASS_ID(LoadVectorGather, LoadVector, 0)
          DEFINE_CLASS_ID(LoadVectorMasked, LoadVector, 1)
      DEFINE_CLASS_ID(Store, Mem, 1)
        DEFINE_CLASS_ID(StoreVector, Store, 0)
          DEFINE_CLASS_ID(StoreVectorScatter, StoreVector, 0)
          DEFINE_CLASS_ID(StoreVectorMasked, StoreVector, 1)
      DEFINE_CLASS_ID(LoadStore, Mem, 2)

    // ... (lines elided) ...

    _max_classes = ClassMask_Move
  };
#undef DEFINE_CLASS_ID

  // Flags are sorted by usage frequency.
  enum NodeFlags {
    Flag_is_Copy                     = 1 << 0, // should be first bit to avoid shift
    Flag_rematerialize               = 1 << 1,
    Flag_needs_anti_dependence_check = 1 << 2,
    Flag_is_macro                    = 1 << 3,
    Flag_is_Con                      = 1 << 4,
    Flag_is_cisc_alternate           = 1 << 5,
    Flag_is_dead_loop_safe           = 1 << 6,
    Flag_may_be_short_branch         = 1 << 7,
    Flag_avoid_back_to_back_before   = 1 << 8,
    Flag_avoid_back_to_back_after    = 1 << 9,
    Flag_has_call                    = 1 << 10,
    Flag_is_reduction                = 1 << 11,
    Flag_is_scheduled                = 1 << 12,
    Flag_has_vector_mask_set         = 1 << 13,
    Flag_is_expensive                = 1 << 14,
    Flag_is_predicated_vector        = 1 << 15,
    Flag_for_post_loop_opts_igvn     = 1 << 16,
    Flag_is_removed_by_peephole      = 1 << 17,
    Flag_is_predicated_using_blend   = 1 << 18,
    _last_flag                       = Flag_is_predicated_using_blend
  };

  class PD;

private:
  juint _class_id;
  juint _flags;

  static juint max_flags();

protected:
  // These methods should be called from constructors only.
  void init_class_id(juint c) {
    _class_id = c; // cast out const
  }
  void init_flags(uint fl) {
    assert(fl <= max_flags(), "invalid node flag");
    _flags |= fl;
  }
  void clear_flag(uint fl) {

  // ... (lines elided) ...

  DEFINE_CLASS_QUERY(MultiBranch)
  DEFINE_CLASS_QUERY(Opaque1)
  DEFINE_CLASS_QUERY(OuterStripMinedLoop)
  DEFINE_CLASS_QUERY(OuterStripMinedLoopEnd)
  DEFINE_CLASS_QUERY(Parm)
  DEFINE_CLASS_QUERY(PCTable)
  DEFINE_CLASS_QUERY(Phi)
  DEFINE_CLASS_QUERY(Proj)
  DEFINE_CLASS_QUERY(Region)
  DEFINE_CLASS_QUERY(Root)
  DEFINE_CLASS_QUERY(SafePoint)
  DEFINE_CLASS_QUERY(SafePointScalarObject)
  DEFINE_CLASS_QUERY(Start)
  DEFINE_CLASS_QUERY(Store)
  DEFINE_CLASS_QUERY(Sub)
  DEFINE_CLASS_QUERY(SubTypeCheck)
  DEFINE_CLASS_QUERY(Type)
  DEFINE_CLASS_QUERY(Vector)
  DEFINE_CLASS_QUERY(VectorMaskCmp)
  DEFINE_CLASS_QUERY(VectorUnbox)
  DEFINE_CLASS_QUERY(VectorReinterpret)
  DEFINE_CLASS_QUERY(CompressV)
  DEFINE_CLASS_QUERY(ExpandV)
  DEFINE_CLASS_QUERY(CompressM)
  DEFINE_CLASS_QUERY(LoadVector)
  DEFINE_CLASS_QUERY(LoadVectorGather)
  DEFINE_CLASS_QUERY(StoreVector)
  DEFINE_CLASS_QUERY(StoreVectorScatter)
  DEFINE_CLASS_QUERY(ShiftV)
  DEFINE_CLASS_QUERY(Unlock)

#undef DEFINE_CLASS_QUERY

  // duplicate of is_MachSpillCopy()
  bool is_SpillCopy () const {
    return ((_class_id & ClassMask_MachSpillCopy) == Class_MachSpillCopy);
  }

  bool is_Con () const { return (_flags & Flag_is_Con) != 0; }
  // A data node which is safe to leave in a dead loop during IGVN optimization.
  bool is_dead_loop_safe() const;

  // is_Copy() returns the copied edge index (0 or 1)
  uint is_Copy() const { return (_flags & Flag_is_Copy); }

  // ... (lines elided) ...

      return this == (const Node*)in(0);
    else
      return is_Start();
  }

  // The Ideal control projection Nodes are IfTrue/IfFalse, JumpProjNode, Root,
  // Goto and Return. This call also returns the block ending Node.
  virtual const Node *is_block_proj() const;

  // The node is a "macro" node which needs to be expanded before matching
  bool is_macro() const { return (_flags & Flag_is_macro) != 0; }
  // The node is expensive: the best control is set during loop opts
  bool is_expensive() const { return (_flags & Flag_is_expensive) != 0 && in(0) != NULL; }

  // An arithmetic node which accumulates data in a loop.
  // It must have the loop's phi as an input and provide a def to the phi.
  bool is_reduction() const { return (_flags & Flag_is_reduction) != 0; }

  bool is_predicated_vector() const { return (_flags & Flag_is_predicated_vector) != 0; }

  bool is_predicated_using_blend() const { return (_flags & Flag_is_predicated_using_blend) != 0; }

  // The node is a CountedLoopEnd with a mask annotation, so that code to restore the vector-mask context is emitted
  bool has_vector_mask_set() const { return (_flags & Flag_has_vector_mask_set) != 0; }
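
  // Illustrative distinction (not an original comment): a vector node
  // "predicated using blend" emulates masking on targets without native
  // masked instructions, by computing all lanes and blending the inactive
  // ones back from the original vector. Querying is an ordinary flag test:
  //
  //   if (n->is_predicated_vector() && n->is_predicated_using_blend()) { ... }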

  // Used in lcm to mark nodes that have been scheduled
  bool is_scheduled() const { return (_flags & Flag_is_scheduled) != 0; }

  bool for_post_loop_opts_igvn() const { return (_flags & Flag_for_post_loop_opts_igvn) != 0; }

  //----------------- Optimization

  // Get the worst-case Type output for this Node.
  virtual const class Type *bottom_type() const;

  // If we find a better type for a node, try to record it permanently.
  // Return true if this node actually changed.
  // Be sure to do the hash_delete game in the "rehash" variant.
  void raise_bottom_type(const Type* new_type);

  // Get the address type with which this node uses and/or defs memory,
  // or NULL if none. The address type is conservatively wide.
  // Returns non-null for calls, membars, loads, stores, etc.
  // Returns TypePtr::BOTTOM if the node touches memory "broadly".
  virtual const class TypePtr *adr_type() const { return NULL; }
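
// Illustrative consumer (a sketch, not from this header): alias analysis keys
// off adr_type() to place a node in a memory slice; Compile::get_alias_index()
// maps the address type to that slice.
//
//   const TypePtr* at = n->adr_type();
//   if (at == TypePtr::BOTTOM) {
//     // n touches memory "broadly": it aliases every memory slice
//   } else if (at != NULL) {
//     int alias_idx = Compile::current()->get_alias_index(at);
//     // n reads and/or writes only the slice named by alias_idx
//   }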