src/hotspot/share/opto/callnode.hpp

  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_OPTO_CALLNODE_HPP
  26 #define SHARE_OPTO_CALLNODE_HPP
  27 
  28 #include "opto/connode.hpp"
  29 #include "opto/mulnode.hpp"
  30 #include "opto/multnode.hpp"
  31 #include "opto/opcodes.hpp"

  32 #include "opto/phaseX.hpp"
  33 #include "opto/replacednodes.hpp"
  34 #include "opto/type.hpp"
  35 #include "utilities/growableArray.hpp"
  36 
  37 // Portions of code courtesy of Clifford Click
  38 
  39 // Optimization - Graph Style
  40 
  41 class NamedCounter;
  42 class MultiNode;
  43 class  SafePointNode;
  44 class   CallNode;
  45 class     CallJavaNode;
  46 class       CallStaticJavaNode;
  47 class       CallDynamicJavaNode;
  48 class     CallRuntimeNode;
  49 class       CallLeafNode;
  50 class         CallLeafNoFPNode;
  51 class         CallLeafVectorNode;

 191 public:
 192   typedef enum {
 193     Reexecute_Undefined = -1, // not defined -- will be translated into false later
 194     Reexecute_False     =  0, // false       -- do not reexecute
 195     Reexecute_True      =  1  // true        -- reexecute the bytecode
 196   } ReexecuteState; // Reexecute state
 197 
 198 private:
 199   JVMState*         _caller;    // List pointer for forming scope chains
 200   uint              _depth;     // One more than caller depth, or one.
 201   uint              _locoff;    // Offset to locals in input edge mapping
 202   uint              _stkoff;    // Offset to stack in input edge mapping
 203   uint              _monoff;    // Offset to monitors in input edge mapping
 204   uint              _scloff;    // Offset to fields of scalar objs in input edge mapping
 205   uint              _endoff;    // Offset to end of input edge mapping
 206   uint              _sp;        // Java Expression Stack Pointer for this state
 207   int               _bci;       // Byte Code Index of this JVM point
 208   ReexecuteState    _reexecute; // Whether this bytecode needs to be re-executed
 209   ciMethod*         _method;    // Method Pointer
 210   SafePointNode*    _map;       // Map node associated with this scope
 211 public:
 212   friend class Compile;
 213   friend class PreserveReexecuteState;
 214 
 215   // Because JVMState objects live over the entire lifetime of the
 216   // Compile object, they are allocated into the comp_arena, which
 217   // does not get resource marked or reset during the compile process
 218   void *operator new( size_t x, Compile* C ) throw() { return C->comp_arena()->Amalloc(x); }
 219   void operator delete( void * ) { } // fast deallocation
 220 
 221   // Create a new JVMState, ready for abstract interpretation.
 222   JVMState(ciMethod* method, JVMState* caller);
 223   JVMState(int stack_size);  // root state; has a null method
 224 
 225   // Access functions for the JVM
 226   // ... --|--- loc ---|--- stk ---|--- arg ---|--- mon ---|--- scl ---|
 227   //       \ locoff    \ stkoff    \ argoff    \ monoff    \ scloff    \ endoff
 228   uint              locoff() const { return _locoff; }
 229   uint              stkoff() const { return _stkoff; }
 230   uint              argoff() const { return _stkoff + _sp; }

 284   void              set_stkoff(uint off) { _stkoff = off; }
 285   void              set_monoff(uint off) { _monoff = off; }
 286   void              set_scloff(uint off) { _scloff = off; }
 287   void              set_endoff(uint off) { _endoff = off; }
 288   void              set_offsets(uint off) {
 289     _locoff = _stkoff = _monoff = _scloff = _endoff = off;
 290   }
 291   void              set_map(SafePointNode* map) { _map = map; }
 292   void              bind_map(SafePointNode* map); // set_map() and set_jvms() for the SafePointNode
 293   void              set_sp(uint sp) { _sp = sp; }
 294                     // _reexecute is initialized to "undefined" for a new bci
 295   void              set_bci(int bci) { if (_bci != bci) _reexecute = Reexecute_Undefined; _bci = bci; }
 296   void              set_should_reexecute(bool reexec) {_reexecute = reexec ? Reexecute_True : Reexecute_False;}
 297 
 298   // Miscellaneous utility functions
 299   JVMState* clone_deep(Compile* C) const;    // recursively clones caller chain
 300   JVMState* clone_shallow(Compile* C) const; // retains uncloned caller
 301   void      set_map_deep(SafePointNode *map);// reset map for all callers
 302   void      adapt_position(int delta);       // Adapt offsets in in-array after adding an edge.
 303   int       interpreter_frame_size() const;

 304 
 305 #ifndef PRODUCT
 306   void      print_method_with_lineno(outputStream* st, bool show_name) const;
 307   void      format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const;
 308   void      dump_spec(outputStream *st) const;
 309   void      dump_on(outputStream* st) const;
 310   void      dump() const {
 311     dump_on(tty);
 312   }
 313 #endif
 314 };
 315 
 316 //------------------------------SafePointNode----------------------------------
 317 // A SafePointNode is a subclass of a MultiNode for convenience (and
 318 // potential code sharing) only - conceptually it is independent of
 319 // the Node semantics.
 320 class SafePointNode : public MultiNode {
 321   friend JVMState;
 322   friend class GraphKit;
 323   friend class VMStructs;

 932                    const TypePtr* adr_type, uint num_bits)
 933     : CallLeafNode(tf, addr, name, adr_type), _num_bits(num_bits)
 934   {
 935   }
 936   virtual int   Opcode() const;
 937   virtual void  calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;
 938 };
 939 
 940 
 941 //------------------------------Allocate---------------------------------------
 942 // High-level memory allocation
 943 //
 944 //  AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
 945 //  get expanded into a code sequence containing a call.  Unlike other CallNodes,
 946 //  they have 2 memory projections and 2 i_o projections (which are distinguished by
 947 //  the _is_io_use flag in the projection.)  This is needed when expanding the node in
 948 //  order to differentiate the uses of the projection on the normal control path from
 949 //  those on the exception return path.
 950 //
 951 class AllocateNode : public CallNode {
 952 public:
 953   enum {
 954     // Output:
 955     RawAddress  = TypeFunc::Parms,    // the newly-allocated raw address
 956     // Inputs:
 957     AllocSize   = TypeFunc::Parms,    // size (in bytes) of the new object
 958     KlassNode,                        // type (maybe dynamic) of the obj.
 959     InitialTest,                      // slow-path test (may be constant)
 960     ALength,                          // array length (or TOP if none)
 961     ValidLengthTest,
 962     ParmLimit
 963   };
 964 
 965   static const TypeFunc* alloc_type(const Type* t) {
 966     const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
 967     fields[AllocSize]   = TypeInt::POS;
 968     fields[KlassNode]   = TypeInstPtr::NOTNULL;
 969     fields[InitialTest] = TypeInt::BOOL;
 970     fields[ALength]     = t;  // length (can be a bad length)
 971     fields[ValidLengthTest] = TypeInt::BOOL;

1035   // Convenience for initialization->maybe_set_complete(phase)
1036   bool maybe_set_complete(PhaseGVN* phase);
1037 
1038   // Return true if the allocation does not escape the thread, i.e. its
1039   // escape state is NoEscape or ArgEscape. InitializeNode._does_not_escape
1040   // is true when its allocation's escape state is NoEscape or
1041   // ArgEscape. If the allocation's InitializeNode is null, check the
1042   // AllocateNode._is_non_escaping flag instead.
1043   // AllocateNode._is_non_escaping is true when the escape state is
1044   // NoEscape.
1045   bool does_not_escape_thread() {
1046     InitializeNode* init = nullptr;
1047     return _is_non_escaping || (((init = initialization()) != nullptr) && init->does_not_escape());
1048   }
1049 
1050   // If the object doesn't escape in its <init> method and a memory barrier is
1051   // inserted at the exit of <init>, the memory barrier for the new is not necessary.
1052   // Invoke this method when the MemBar is at the exit of the initializer and
1053   // post-dominates the allocation node.
1054   void compute_MemBar_redundancy(ciMethod* initializer);
1055   bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }
1056 
1057   Node* make_ideal_mark(PhaseGVN *phase, Node* obj, Node* control, Node* mem);
1058 };
1059 
1060 //------------------------------AllocateArray---------------------------------
1061 //
1062 // High-level array allocation
1063 //
1064 class AllocateArrayNode : public AllocateNode {
1065 public:
1066   AllocateArrayNode(Compile* C, const TypeFunc* atype, Node* ctrl, Node* mem, Node* abio, Node* size, Node* klass_node,
1067                     Node* initial_test, Node* count_val, Node* valid_length_test)
1068     : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node,
1069                    initial_test)
1070   {
1071     init_class_id(Class_AllocateArray);
1072     set_req(AllocateNode::ALength,        count_val);
1073     set_req(AllocateNode::ValidLengthTest, valid_length_test);
1074   }
1075   virtual int Opcode() const;
1076 
1077   // Dig the length operand out of an array allocation site.

  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_OPTO_CALLNODE_HPP
  26 #define SHARE_OPTO_CALLNODE_HPP
  27 
  28 #include "opto/connode.hpp"
  29 #include "opto/mulnode.hpp"
  30 #include "opto/multnode.hpp"
  31 #include "opto/opcodes.hpp"
  32 #include "opto/partialEscape.hpp"
  33 #include "opto/phaseX.hpp"
  34 #include "opto/replacednodes.hpp"
  35 #include "opto/type.hpp"
  36 #include "utilities/growableArray.hpp"
  37 
  38 // Portions of code courtesy of Clifford Click
  39 
  40 // Optimization - Graph Style
  41 
  42 class NamedCounter;
  43 class MultiNode;
  44 class  SafePointNode;
  45 class   CallNode;
  46 class     CallJavaNode;
  47 class       CallStaticJavaNode;
  48 class       CallDynamicJavaNode;
  49 class     CallRuntimeNode;
  50 class       CallLeafNode;
  51 class         CallLeafNoFPNode;
  52 class         CallLeafVectorNode;

 192 public:
 193   typedef enum {
 194     Reexecute_Undefined = -1, // not defined -- will be translated into false later
 195     Reexecute_False     =  0, // false       -- do not reexecute
 196     Reexecute_True      =  1  // true        -- reexecute the bytecode
 197   } ReexecuteState; // Reexecute state
 198 
 199 private:
 200   JVMState*         _caller;    // List pointer for forming scope chains
 201   uint              _depth;     // One more than caller depth, or one.
 202   uint              _locoff;    // Offset to locals in input edge mapping
 203   uint              _stkoff;    // Offset to stack in input edge mapping
 204   uint              _monoff;    // Offset to monitors in input edge mapping
 205   uint              _scloff;    // Offset to fields of scalar objs in input edge mapping
 206   uint              _endoff;    // Offset to end of input edge mapping
 207   uint              _sp;        // Java Expression Stack Pointer for this state
 208   int               _bci;       // Byte Code Index of this JVM point
 209   ReexecuteState    _reexecute; // Whether this bytecode needs to be re-executed
 210   ciMethod*         _method;    // Method Pointer
 211   SafePointNode*    _map;       // Map node associated with this scope
 212   PEAState          _alloc_state;
 213 
 214 public:
 215   friend class Compile;
 216   friend class PreserveReexecuteState;
 217 
 218   // Because JVMState objects live over the entire lifetime of the
 219   // Compile object, they are allocated into the comp_arena, which
 220   // does not get resource marked or reset during the compile process
 221   void *operator new( size_t x, Compile* C ) throw() { return C->comp_arena()->Amalloc(x); }
 222   void operator delete( void * ) { } // fast deallocation
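
Callers obtain JVMState instances through the placement operator new above; a minimal usage sketch, where C, callee and caller_jvms stand in for real values:

    // Allocate a JVMState in the compile-lifetime arena via the placement
    // operator new declared above; 'C' is the current Compile*, 'callee' and
    // 'caller_jvms' are placeholders for illustration.
    JVMState* jvms = new (C) JVMState(callee, caller_jvms);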
 223 
 224   // Create a new JVMState, ready for abstract interpretation.
 225   JVMState(ciMethod* method, JVMState* caller);
 226   JVMState(int stack_size);  // root state; has a null method
 227 
 228   // Access functions for the JVM
 229   // ... --|--- loc ---|--- stk ---|--- arg ---|--- mon ---|--- scl ---|
 230   //       \ locoff    \ stkoff    \ argoff    \ monoff    \ scloff    \ endoff
 231   uint              locoff() const { return _locoff; }
 232   uint              stkoff() const { return _stkoff; }
 233   uint              argoff() const { return _stkoff + _sp; }
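
A worked example of the diagram above, with hypothetical offsets (locoff == 10, four locals, sp == 2), showing how argoff() is derived:

    // Hypothetical layout: locals occupy [10, 14), the expression stack
    // occupies [14, 16), so outgoing arguments start at stkoff + sp == 16.
    // 'jvms' is a placeholder JVMState*.
    assert(jvms->argoff() == jvms->stkoff() + jvms->sp(), "argoff == stkoff + sp");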

 287   void              set_stkoff(uint off) { _stkoff = off; }
 288   void              set_monoff(uint off) { _monoff = off; }
 289   void              set_scloff(uint off) { _scloff = off; }
 290   void              set_endoff(uint off) { _endoff = off; }
 291   void              set_offsets(uint off) {
 292     _locoff = _stkoff = _monoff = _scloff = _endoff = off;
 293   }
 294   void              set_map(SafePointNode* map) { _map = map; }
 295   void              bind_map(SafePointNode* map); // set_map() and set_jvms() for the SafePointNode
 296   void              set_sp(uint sp) { _sp = sp; }
 297                     // _reexecute is initialized to "undefined" for a new bci
 298   void              set_bci(int bci) { if (_bci != bci) _reexecute = Reexecute_Undefined; _bci = bci; }
 299   void              set_should_reexecute(bool reexec) {_reexecute = reexec ? Reexecute_True : Reexecute_False;}
 300 
 301   // Miscellaneous utility functions
 302   JVMState* clone_deep(Compile* C) const;    // recursively clones caller chain
 303   JVMState* clone_shallow(Compile* C) const; // retains uncloned caller
 304   void      set_map_deep(SafePointNode *map);// reset map for all callers
 305   void      adapt_position(int delta);       // Adapt offsets in in-array after adding an edge.
 306   int       interpreter_frame_size() const;
 307   PEAState& alloc_state() { return _alloc_state; }
 308 
 309 #ifndef PRODUCT
 310   void      print_method_with_lineno(outputStream* st, bool show_name) const;
 311   void      format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const;
 312   void      dump_spec(outputStream *st) const;
 313   void      dump_on(outputStream* st) const;
 314   void      dump() const {
 315     dump_on(tty);
 316   }
 317 #endif
 318 };
 319 
 320 //------------------------------SafePointNode----------------------------------
 321 // A SafePointNode is a subclass of a MultiNode for convenience (and
 322 // potential code sharing) only - conceptually it is independent of
 323 // the Node semantics.
 324 class SafePointNode : public MultiNode {
 325   friend JVMState;
 326   friend class GraphKit;
 327   friend class VMStructs;

 936                    const TypePtr* adr_type, uint num_bits)
 937     : CallLeafNode(tf, addr, name, adr_type), _num_bits(num_bits)
 938   {
 939   }
 940   virtual int   Opcode() const;
 941   virtual void  calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;
 942 };
 943 
 944 
 945 //------------------------------Allocate---------------------------------------
 946 // High-level memory allocation
 947 //
 948 //  AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
 949 //  get expanded into a code sequence containing a call.  Unlike other CallNodes,
 950 //  they have 2 memory projections and 2 i_o projections (which are distinguished by
 951 //  the _is_io_use flag in the projection.)  This is needed when expanding the node in
 952 //  order to differentiate the uses of the projection on the normal control path from
 953 //  those on the exception return path.
 954 //
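A sketch, not part of this change, of how the two i_o projections can be told apart when walking an allocation's outputs; _con and _is_io_use are existing public ProjNode fields, and 'alloc' is a placeholder AllocateNode*:

    for (DUIterator_Fast imax, i = alloc->fast_outs(imax); i < imax; i++) {
      ProjNode* proj = alloc->fast_out(i)->isa_Proj();
      if (proj != nullptr && proj->_con == TypeFunc::I_O) {
        if (proj->_is_io_use) {
          // use on the exception return path
        } else {
          // use on the normal control path
        }
      }
    }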
 955 class AllocateNode : public CallNode {
 956 private:
 957   int _materialized; // materialization count by PEA
 958 
 959 public:
 960   enum {
 961     // Output:
 962     RawAddress  = TypeFunc::Parms,    // the newly-allocated raw address
 963     // Inputs:
 964     AllocSize   = TypeFunc::Parms,    // size (in bytes) of the new object
 965     KlassNode,                        // type (maybe dynamic) of the obj.
 966     InitialTest,                      // slow-path test (may be constant)
 967     ALength,                          // array length (or TOP if none)
 968     ValidLengthTest,
 969     ParmLimit
 970   };
 971 
 972   static const TypeFunc* alloc_type(const Type* t) {
 973     const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
 974     fields[AllocSize]   = TypeInt::POS;
 975     fields[KlassNode]   = TypeInstPtr::NOTNULL;
 976     fields[InitialTest] = TypeInt::BOOL;
 977     fields[ALength]     = t;  // length (can be a bad length)
 978     fields[ValidLengthTest] = TypeInt::BOOL;

1042   // Convenience for initialization->maybe_set_complete(phase)
1043   bool maybe_set_complete(PhaseGVN* phase);
1044 
1045   // Return true if the allocation does not escape the thread, i.e. its
1046   // escape state is NoEscape or ArgEscape. InitializeNode._does_not_escape
1047   // is true when its allocation's escape state is NoEscape or
1048   // ArgEscape. If the allocation's InitializeNode is null, check the
1049   // AllocateNode._is_non_escaping flag instead.
1050   // AllocateNode._is_non_escaping is true when the escape state is
1051   // NoEscape.
1052   bool does_not_escape_thread() {
1053     InitializeNode* init = nullptr;
1054     return _is_non_escaping || (((init = initialization()) != nullptr) && init->does_not_escape());
1055   }
1056 
1057   // If the object doesn't escape in its <init> method and a memory barrier is
1058   // inserted at the exit of <init>, the memory barrier for the new is not necessary.
1059   // Invoke this method when the MemBar is at the exit of the initializer and
1060   // post-dominates the allocation node.
1061   void compute_MemBar_redundancy(ciMethod* initializer);
1062   bool is_allocation_MemBar_redundant() const { return _is_allocation_MemBar_redundant; }
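
A minimal call-pattern sketch for the two members above ('alloc' and 'init_method' are placeholders; in practice the parser drives this at the exit of a constructor):

    // Record that the exit MemBar of the initializer post-dominates the
    // allocation, making the allocation's own barrier redundant.
    alloc->compute_MemBar_redundancy(init_method);
    if (alloc->is_allocation_MemBar_redundant()) {
      // macro expansion may then omit the MemBar for this allocation
    }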
1063 
1064   Node* make_ideal_mark(PhaseGVN *phase, Node* obj, Node* control, Node* mem);
1065 
1066   const TypeOopPtr* oop_type(const PhaseValues& phase) const;
1067 
1068   void inc_materialized() {
1069     _materialized++;
1070   }
1071 
1072   int materialized_cnt() const {
1073     return _materialized;
1074   }
1075 };
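
A hedged sketch of the new materialization bookkeeping ('alloc' is a placeholder; the real call sites live in the partial escape analysis pass):

    // When PEA materializes a virtual allocation (e.g. at a merge or escape
    // point), bump the per-node counter so later heuristics can query it.
    alloc->inc_materialized();
    assert(alloc->materialized_cnt() > 0, "materialization was recorded");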
1076 
1077 //------------------------------AllocateArray---------------------------------
1078 //
1079 // High-level array allocation
1080 //
1081 class AllocateArrayNode : public AllocateNode {
1082 public:
1083   AllocateArrayNode(Compile* C, const TypeFunc* atype, Node* ctrl, Node* mem, Node* abio, Node* size, Node* klass_node,
1084                     Node* initial_test, Node* count_val, Node* valid_length_test)
1085     : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node,
1086                    initial_test)
1087   {
1088     init_class_id(Class_AllocateArray);
1089     set_req(AllocateNode::ALength,        count_val);
1090     set_req(AllocateNode::ValidLengthTest, valid_length_test);
1091   }
1092   virtual int Opcode() const;
1093 
1094   // Dig the length operand out of an array allocation site.