< prev index next >

src/hotspot/share/opto/node.hpp

Print this page

 154 class RegionNode;
 155 class RootNode;
 156 class SafePointNode;
 157 class SafePointScalarObjectNode;
 158 class StartNode;
 159 class State;
 160 class StoreNode;
 161 class SubNode;
 162 class SubTypeCheckNode;
 163 class Type;
 164 class TypeNode;
 165 class UnlockNode;
 166 class VectorNode;
 167 class LoadVectorNode;
 168 class LoadVectorMaskedNode;
 169 class StoreVectorMaskedNode;
 170 class LoadVectorGatherNode;
 171 class StoreVectorNode;
 172 class StoreVectorScatterNode;
 173 class VectorMaskCmpNode;

 174 class VectorSet;

 175 
 176 // The type of all node counts and indexes.
 177 // It must hold at least 16 bits, but must also be fast to load and store.
 178 // This type, if less than 32 bits, could limit the number of possible nodes.
 179 // (To make this type platform-specific, move to globalDefinitions_xxx.hpp.)
 180 typedef unsigned int node_idx_t;
 181 
 182 
 183 #ifndef OPTO_DU_ITERATOR_ASSERT
 184 #ifdef ASSERT
 185 #define OPTO_DU_ITERATOR_ASSERT 1
 186 #else
 187 #define OPTO_DU_ITERATOR_ASSERT 0
 188 #endif
 189 #endif //OPTO_DU_ITERATOR_ASSERT
 190 
 191 #if OPTO_DU_ITERATOR_ASSERT
 192 class DUIterator;
 193 class DUIterator_Fast;
 194 class DUIterator_Last;

 689 
 690     DEFINE_CLASS_ID(Type,  Node, 2)
 691       DEFINE_CLASS_ID(Phi,   Type, 0)
 692       DEFINE_CLASS_ID(ConstraintCast, Type, 1)
 693         DEFINE_CLASS_ID(CastII, ConstraintCast, 0)
 694         DEFINE_CLASS_ID(CheckCastPP, ConstraintCast, 1)
 695         DEFINE_CLASS_ID(CastLL, ConstraintCast, 2)
 696         DEFINE_CLASS_ID(CastFF, ConstraintCast, 3)
 697         DEFINE_CLASS_ID(CastDD, ConstraintCast, 4)
 698         DEFINE_CLASS_ID(CastVV, ConstraintCast, 5)
 699       DEFINE_CLASS_ID(CMove, Type, 3)
 700       DEFINE_CLASS_ID(SafePointScalarObject, Type, 4)
 701       DEFINE_CLASS_ID(DecodeNarrowPtr, Type, 5)
 702         DEFINE_CLASS_ID(DecodeN, DecodeNarrowPtr, 0)
 703         DEFINE_CLASS_ID(DecodeNKlass, DecodeNarrowPtr, 1)
 704       DEFINE_CLASS_ID(EncodeNarrowPtr, Type, 6)
 705         DEFINE_CLASS_ID(EncodeP, EncodeNarrowPtr, 0)
 706         DEFINE_CLASS_ID(EncodePKlass, EncodeNarrowPtr, 1)
 707       DEFINE_CLASS_ID(Vector, Type, 7)
 708         DEFINE_CLASS_ID(VectorMaskCmp, Vector, 0)


 709 
 710     DEFINE_CLASS_ID(Proj,  Node, 3)
 711       DEFINE_CLASS_ID(CatchProj, Proj, 0)
 712       DEFINE_CLASS_ID(JumpProj,  Proj, 1)
 713       DEFINE_CLASS_ID(IfProj,    Proj, 2)
 714         DEFINE_CLASS_ID(IfTrue,    IfProj, 0)
 715         DEFINE_CLASS_ID(IfFalse,   IfProj, 1)
 716       DEFINE_CLASS_ID(Parm,      Proj, 4)
 717       DEFINE_CLASS_ID(MachProj,  Proj, 5)
 718 
 719     DEFINE_CLASS_ID(Mem, Node, 4)
 720       DEFINE_CLASS_ID(Load, Mem, 0)
 721         DEFINE_CLASS_ID(LoadVector,  Load, 0)
 722           DEFINE_CLASS_ID(LoadVectorGather, LoadVector, 0)
 723           DEFINE_CLASS_ID(LoadVectorMasked, LoadVector, 1)
 724       DEFINE_CLASS_ID(Store, Mem, 1)
 725         DEFINE_CLASS_ID(StoreVector, Store, 0)
 726           DEFINE_CLASS_ID(StoreVectorScatter, StoreVector, 0)
 727           DEFINE_CLASS_ID(StoreVectorMasked, StoreVector, 1)
 728       DEFINE_CLASS_ID(LoadStore, Mem, 2)

 759   };
 760   #undef DEFINE_CLASS_ID
 761 
 762   // Flags are sorted by usage frequency.
 763   enum NodeFlags {
 764     Flag_is_Copy                     = 1 << 0, // should be first bit to avoid shift
 765     Flag_rematerialize               = 1 << 1,
 766     Flag_needs_anti_dependence_check = 1 << 2,
 767     Flag_is_macro                    = 1 << 3,
 768     Flag_is_Con                      = 1 << 4,
 769     Flag_is_cisc_alternate           = 1 << 5,
 770     Flag_is_dead_loop_safe           = 1 << 6,
 771     Flag_may_be_short_branch         = 1 << 7,
 772     Flag_avoid_back_to_back_before   = 1 << 8,
 773     Flag_avoid_back_to_back_after    = 1 << 9,
 774     Flag_has_call                    = 1 << 10,
 775     Flag_is_reduction                = 1 << 11,
 776     Flag_is_scheduled                = 1 << 12,
 777     Flag_has_vector_mask_set         = 1 << 13,
 778     Flag_is_expensive                = 1 << 14,
 779     Flag_for_post_loop_opts_igvn     = 1 << 15,

 780     _last_flag                       = Flag_for_post_loop_opts_igvn
 781   };
 782 
 783   class PD;
 784 
 785 private:
 786   juint _class_id;
 787   juint _flags;
 788 
 789   static juint max_flags();
 790 
 791 protected:
 792   // These methods should be called from constructors only.
 793   void init_class_id(juint c) {
 794     _class_id = c; // cast out const
 795   }
 796   void init_flags(uint fl) {
 797     assert(fl <= max_flags(), "invalid node flag");
 798     _flags |= fl;
 799   }

 913   DEFINE_CLASS_QUERY(Mul)
 914   DEFINE_CLASS_QUERY(Multi)
 915   DEFINE_CLASS_QUERY(MultiBranch)
 916   DEFINE_CLASS_QUERY(Opaque1)
 917   DEFINE_CLASS_QUERY(OuterStripMinedLoop)
 918   DEFINE_CLASS_QUERY(OuterStripMinedLoopEnd)
 919   DEFINE_CLASS_QUERY(Parm)
 920   DEFINE_CLASS_QUERY(PCTable)
 921   DEFINE_CLASS_QUERY(Phi)
 922   DEFINE_CLASS_QUERY(Proj)
 923   DEFINE_CLASS_QUERY(Region)
 924   DEFINE_CLASS_QUERY(Root)
 925   DEFINE_CLASS_QUERY(SafePoint)
 926   DEFINE_CLASS_QUERY(SafePointScalarObject)
 927   DEFINE_CLASS_QUERY(Start)
 928   DEFINE_CLASS_QUERY(Store)
 929   DEFINE_CLASS_QUERY(Sub)
 930   DEFINE_CLASS_QUERY(SubTypeCheck)
 931   DEFINE_CLASS_QUERY(Type)
 932   DEFINE_CLASS_QUERY(Vector)



 933   DEFINE_CLASS_QUERY(LoadVector)
 934   DEFINE_CLASS_QUERY(LoadVectorGather)
 935   DEFINE_CLASS_QUERY(StoreVector)
 936   DEFINE_CLASS_QUERY(StoreVectorScatter)
 937   DEFINE_CLASS_QUERY(VectorMaskCmp)
 938   DEFINE_CLASS_QUERY(Unlock)
 939 
 940   #undef DEFINE_CLASS_QUERY
 941 
 942   // duplicate of is_MachSpillCopy()
 943   bool is_SpillCopy () const {
 944     return ((_class_id & ClassMask_MachSpillCopy) == Class_MachSpillCopy);
 945   }
 946 
 947   bool is_Con () const { return (_flags & Flag_is_Con) != 0; }
 948   // The data node which is safe to leave in dead loop during IGVN optimization.
 949   bool is_dead_loop_safe() const;
 950 
 951   // is_Copy() returns copied edge index (0 or 1)
 952   uint is_Copy() const { return (_flags & Flag_is_Copy); }
 953 
 954   virtual bool is_CFG() const { return false; }
 955 
 956   // If this node is control-dependent on a test, can it be
 957   // rerouted to a dominating equivalent test?  This is usually

 968   bool is_block_start() const {
 969     if ( is_Region() )
 970       return this == (const Node*)in(0);
 971     else
 972       return is_Start();
 973   }
 974 
 975   // The Ideal control projection Nodes are IfTrue/IfFalse, JumpProjNode, Root,
 976   // Goto and Return.  This call also returns the block ending Node.
 977   virtual const Node *is_block_proj() const;
 978 
 979   // The node is a "macro" node which needs to be expanded before matching
 980   bool is_macro() const { return (_flags & Flag_is_macro) != 0; }
 981   // The node is expensive: the best control is set during loop opts
 982   bool is_expensive() const { return (_flags & Flag_is_expensive) != 0 && in(0) != NULL; }
 983 
 984   // An arithmetic node which accumulates a data in a loop.
 985   // It must have the loop's phi as input and provide a def to the phi.
 986   bool is_reduction() const { return (_flags & Flag_is_reduction) != 0; }
 987 


 988   // The node is a CountedLoopEnd with a mask annotation so as to emit a restore context
 989   bool has_vector_mask_set() const { return (_flags & Flag_has_vector_mask_set) != 0; }
 990 
 991   // Used in lcm to mark nodes that have scheduled
 992   bool is_scheduled() const { return (_flags & Flag_is_scheduled) != 0; }
 993 
 994   bool for_post_loop_opts_igvn() const { return (_flags & Flag_for_post_loop_opts_igvn) != 0; }
 995 
 996 //----------------- Optimization
 997 
 998   // Get the worst-case Type output for this Node.
 999   virtual const class Type *bottom_type() const;
1000 
1001   // If we find a better type for a node, try to record it permanently.
1002   // Return true if this node actually changed.
1003   // Be sure to do the hash_delete game in the "rehash" variant.
1004   void raise_bottom_type(const Type* new_type);
1005 
1006   // Get the address type with which this node uses and/or defs memory,
1007   // or NULL if none.  The address type is conservatively wide.

 154 class RegionNode;
 155 class RootNode;
 156 class SafePointNode;
 157 class SafePointScalarObjectNode;
 158 class StartNode;
 159 class State;
 160 class StoreNode;
 161 class SubNode;
 162 class SubTypeCheckNode;
 163 class Type;
 164 class TypeNode;
 165 class UnlockNode;
 166 class VectorNode;
 167 class LoadVectorNode;
 168 class LoadVectorMaskedNode;
 169 class StoreVectorMaskedNode;
 170 class LoadVectorGatherNode;
 171 class StoreVectorNode;
 172 class StoreVectorScatterNode;
 173 class VectorMaskCmpNode;
 174 class VectorUnboxNode;
 175 class VectorSet;
 176 class VectorReinterpretNode;
 177 
 178 // The type of all node counts and indexes.
 179 // It must hold at least 16 bits, but must also be fast to load and store.
 180 // This type, if less than 32 bits, could limit the number of possible nodes.
 181 // (To make this type platform-specific, move to globalDefinitions_xxx.hpp.)
 182 typedef unsigned int node_idx_t;
 183 
 184 
 185 #ifndef OPTO_DU_ITERATOR_ASSERT
 186 #ifdef ASSERT
 187 #define OPTO_DU_ITERATOR_ASSERT 1
 188 #else
 189 #define OPTO_DU_ITERATOR_ASSERT 0
 190 #endif
 191 #endif //OPTO_DU_ITERATOR_ASSERT
 192 
 193 #if OPTO_DU_ITERATOR_ASSERT
 194 class DUIterator;
 195 class DUIterator_Fast;
 196 class DUIterator_Last;

 691 
 692     DEFINE_CLASS_ID(Type,  Node, 2)
 693       DEFINE_CLASS_ID(Phi,   Type, 0)
 694       DEFINE_CLASS_ID(ConstraintCast, Type, 1)
 695         DEFINE_CLASS_ID(CastII, ConstraintCast, 0)
 696         DEFINE_CLASS_ID(CheckCastPP, ConstraintCast, 1)
 697         DEFINE_CLASS_ID(CastLL, ConstraintCast, 2)
 698         DEFINE_CLASS_ID(CastFF, ConstraintCast, 3)
 699         DEFINE_CLASS_ID(CastDD, ConstraintCast, 4)
 700         DEFINE_CLASS_ID(CastVV, ConstraintCast, 5)
 701       DEFINE_CLASS_ID(CMove, Type, 3)
 702       DEFINE_CLASS_ID(SafePointScalarObject, Type, 4)
 703       DEFINE_CLASS_ID(DecodeNarrowPtr, Type, 5)
 704         DEFINE_CLASS_ID(DecodeN, DecodeNarrowPtr, 0)
 705         DEFINE_CLASS_ID(DecodeNKlass, DecodeNarrowPtr, 1)
 706       DEFINE_CLASS_ID(EncodeNarrowPtr, Type, 6)
 707         DEFINE_CLASS_ID(EncodeP, EncodeNarrowPtr, 0)
 708         DEFINE_CLASS_ID(EncodePKlass, EncodeNarrowPtr, 1)
 709       DEFINE_CLASS_ID(Vector, Type, 7)
 710         DEFINE_CLASS_ID(VectorMaskCmp, Vector, 0)
 711         DEFINE_CLASS_ID(VectorUnbox, Vector, 1)
 712         DEFINE_CLASS_ID(VectorReinterpret, Vector, 2)
 713 
 714     DEFINE_CLASS_ID(Proj,  Node, 3)
 715       DEFINE_CLASS_ID(CatchProj, Proj, 0)
 716       DEFINE_CLASS_ID(JumpProj,  Proj, 1)
 717       DEFINE_CLASS_ID(IfProj,    Proj, 2)
 718         DEFINE_CLASS_ID(IfTrue,    IfProj, 0)
 719         DEFINE_CLASS_ID(IfFalse,   IfProj, 1)
 720       DEFINE_CLASS_ID(Parm,      Proj, 4)
 721       DEFINE_CLASS_ID(MachProj,  Proj, 5)
 722 
 723     DEFINE_CLASS_ID(Mem, Node, 4)
 724       DEFINE_CLASS_ID(Load, Mem, 0)
 725         DEFINE_CLASS_ID(LoadVector,  Load, 0)
 726           DEFINE_CLASS_ID(LoadVectorGather, LoadVector, 0)
 727           DEFINE_CLASS_ID(LoadVectorMasked, LoadVector, 1)
 728       DEFINE_CLASS_ID(Store, Mem, 1)
 729         DEFINE_CLASS_ID(StoreVector, Store, 0)
 730           DEFINE_CLASS_ID(StoreVectorScatter, StoreVector, 0)
 731           DEFINE_CLASS_ID(StoreVectorMasked, StoreVector, 1)
 732       DEFINE_CLASS_ID(LoadStore, Mem, 2)

 763   };
 764   #undef DEFINE_CLASS_ID
 765 
 766   // Flags are sorted by usage frequency.
 767   enum NodeFlags {
 768     Flag_is_Copy                     = 1 << 0, // should be first bit to avoid shift
 769     Flag_rematerialize               = 1 << 1,
 770     Flag_needs_anti_dependence_check = 1 << 2,
 771     Flag_is_macro                    = 1 << 3,
 772     Flag_is_Con                      = 1 << 4,
 773     Flag_is_cisc_alternate           = 1 << 5,
 774     Flag_is_dead_loop_safe           = 1 << 6,
 775     Flag_may_be_short_branch         = 1 << 7,
 776     Flag_avoid_back_to_back_before   = 1 << 8,
 777     Flag_avoid_back_to_back_after    = 1 << 9,
 778     Flag_has_call                    = 1 << 10,
 779     Flag_is_reduction                = 1 << 11,
 780     Flag_is_scheduled                = 1 << 12,
 781     Flag_has_vector_mask_set         = 1 << 13,
 782     Flag_is_expensive                = 1 << 14,
 783     Flag_is_predicated_vector        = 1 << 15,
 784     Flag_for_post_loop_opts_igvn     = 1 << 16,
 785     _last_flag                       = Flag_for_post_loop_opts_igvn
 786   };
 787 
 788   class PD;
 789 
 790 private:
 791   juint _class_id;
 792   juint _flags;
 793 
 794   static juint max_flags();
 795 
 796 protected:
 797   // These methods should be called from constructors only.
 798   void init_class_id(juint c) {
 799     _class_id = c; // cast out const
 800   }
 801   void init_flags(uint fl) {
 802     assert(fl <= max_flags(), "invalid node flag");
 803     _flags |= fl;
 804   }

 918   DEFINE_CLASS_QUERY(Mul)
 919   DEFINE_CLASS_QUERY(Multi)
 920   DEFINE_CLASS_QUERY(MultiBranch)
 921   DEFINE_CLASS_QUERY(Opaque1)
 922   DEFINE_CLASS_QUERY(OuterStripMinedLoop)
 923   DEFINE_CLASS_QUERY(OuterStripMinedLoopEnd)
 924   DEFINE_CLASS_QUERY(Parm)
 925   DEFINE_CLASS_QUERY(PCTable)
 926   DEFINE_CLASS_QUERY(Phi)
 927   DEFINE_CLASS_QUERY(Proj)
 928   DEFINE_CLASS_QUERY(Region)
 929   DEFINE_CLASS_QUERY(Root)
 930   DEFINE_CLASS_QUERY(SafePoint)
 931   DEFINE_CLASS_QUERY(SafePointScalarObject)
 932   DEFINE_CLASS_QUERY(Start)
 933   DEFINE_CLASS_QUERY(Store)
 934   DEFINE_CLASS_QUERY(Sub)
 935   DEFINE_CLASS_QUERY(SubTypeCheck)
 936   DEFINE_CLASS_QUERY(Type)
 937   DEFINE_CLASS_QUERY(Vector)
 938   DEFINE_CLASS_QUERY(VectorMaskCmp)
 939   DEFINE_CLASS_QUERY(VectorUnbox)
 940   DEFINE_CLASS_QUERY(VectorReinterpret)
 941   DEFINE_CLASS_QUERY(LoadVector)
 942   DEFINE_CLASS_QUERY(LoadVectorGather)
 943   DEFINE_CLASS_QUERY(StoreVector)
 944   DEFINE_CLASS_QUERY(StoreVectorScatter)

 945   DEFINE_CLASS_QUERY(Unlock)
 946 
 947   #undef DEFINE_CLASS_QUERY
 948 
 949   // duplicate of is_MachSpillCopy()
 950   bool is_SpillCopy () const {
 951     return ((_class_id & ClassMask_MachSpillCopy) == Class_MachSpillCopy);
 952   }
 953 
 954   bool is_Con () const { return (_flags & Flag_is_Con) != 0; }
 955   // The data node which is safe to leave in dead loop during IGVN optimization.
 956   bool is_dead_loop_safe() const;
 957 
 958   // is_Copy() returns copied edge index (0 or 1)
 959   uint is_Copy() const { return (_flags & Flag_is_Copy); }
 960 
 961   virtual bool is_CFG() const { return false; }
 962 
 963   // If this node is control-dependent on a test, can it be
 964   // rerouted to a dominating equivalent test?  This is usually

 975   bool is_block_start() const {
 976     if ( is_Region() )
 977       return this == (const Node*)in(0);
 978     else
 979       return is_Start();
 980   }
 981 
 982   // The Ideal control projection Nodes are IfTrue/IfFalse, JumpProjNode, Root,
 983   // Goto and Return.  This call also returns the block ending Node.
 984   virtual const Node *is_block_proj() const;
 985 
 986   // The node is a "macro" node which needs to be expanded before matching
 987   bool is_macro() const { return (_flags & Flag_is_macro) != 0; }
 988   // The node is expensive: the best control is set during loop opts
 989   bool is_expensive() const { return (_flags & Flag_is_expensive) != 0 && in(0) != NULL; }
 990 
 991   // An arithmetic node which accumulates a data in a loop.
 992   // It must have the loop's phi as input and provide a def to the phi.
 993   bool is_reduction() const { return (_flags & Flag_is_reduction) != 0; }
 994 
 995   bool is_predicated_vector() const { return (_flags & Flag_is_predicated_vector) != 0; }
 996 
 997   // The node is a CountedLoopEnd with a mask annotation so as to emit a restore context
 998   bool has_vector_mask_set() const { return (_flags & Flag_has_vector_mask_set) != 0; }
 999 
1000   // Used in lcm to mark nodes that have scheduled
1001   bool is_scheduled() const { return (_flags & Flag_is_scheduled) != 0; }
1002 
1003   bool for_post_loop_opts_igvn() const { return (_flags & Flag_for_post_loop_opts_igvn) != 0; }
1004 
1005 //----------------- Optimization
1006 
1007   // Get the worst-case Type output for this Node.
1008   virtual const class Type *bottom_type() const;
1009 
1010   // If we find a better type for a node, try to record it permanently.
1011   // Return true if this node actually changed.
1012   // Be sure to do the hash_delete game in the "rehash" variant.
1013   void raise_bottom_type(const Type* new_type);
1014 
1015   // Get the address type with which this node uses and/or defs memory,
1016   // or NULL if none.  The address type is conservatively wide.
< prev index next >