
src/hotspot/share/opto/callnode.hpp

@@ -78,11 +78,11 @@
   virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
   virtual const Type* Value(PhaseGVN* phase) const;
   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
   virtual void  calling_convention( BasicType* sig_bt, VMRegPair *parm_reg, uint length ) const;
   virtual const RegMask &in_RegMask(uint) const;
-  virtual Node *match( const ProjNode *proj, const Matcher *m );
+  virtual Node *match(const ProjNode *proj, const Matcher *m, const RegMask* mask);
   virtual uint ideal_reg() const { return 0; }
 #ifndef PRODUCT
   virtual void  dump_spec(outputStream *st) const;
   virtual void  dump_compact_spec(outputStream *st) const;
 #endif

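match() now takes a RegMask*: once a call can produce several result
projections (see the CallProjections change below), the mask presumably tells
the matcher which register the projection being matched must use. A minimal
sketch of an override under the new signature, assuming a hypothetical
subclass (MyCallNode is illustrative, not part of this patch):

    // Sketch only: a hypothetical subclass threading the new parameter through.
    class MyCallNode : public CallNode {
    public:
      MyCallNode(const TypeFunc* tf, address addr, const TypePtr* at)
        : CallNode(tf, addr, at) {}
      virtual Node* match(const ProjNode* proj, const Matcher* m, const RegMask* mask) {
        // 'mask' (the new third argument) constrains which register the
        // matched result projection may occupy.
        return CallNode::match(proj, m, mask);
      }
    };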
@@ -540,21 +540,39 @@
 };
 
 
 // Simple container for the outgoing projections of a call.  Useful
 // for serious surgery on calls.
-class CallProjections : public StackObj {
+class CallProjections {
 public:
   Node* fallthrough_proj;
   Node* fallthrough_catchproj;
   Node* fallthrough_memproj;
   Node* fallthrough_ioproj;
   Node* catchall_catchproj;
   Node* catchall_memproj;
   Node* catchall_ioproj;
-  Node* resproj;
   Node* exobj;
+  uint nb_resproj;
+  Node* resproj[1]; // at least one projection; allocated as a trailing array of nb_resproj slots
+
+  CallProjections(uint nbres) {
+    fallthrough_proj      = NULL;
+    fallthrough_catchproj = NULL;
+    fallthrough_memproj   = NULL;
+    fallthrough_ioproj    = NULL;
+    catchall_catchproj    = NULL;
+    catchall_memproj      = NULL;
+    catchall_ioproj       = NULL;
+    exobj                 = NULL;
+    nb_resproj            = nbres;
+    resproj[0]            = NULL; // slot 0 is always present, even when nbres is 0
+    for (uint i = 1; i < nb_resproj; i++) {
+      resproj[i]          = NULL;
+    }
+  }
+
 };
 
 class CallGenerator;
 
 //------------------------------CallNode---------------------------------------

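Since CallProjections no longer derives from StackObj and now ends in the
variable-length resproj[] array, instances must be over-allocated so the
trailing array has room for every result projection. A minimal allocation
sketch, assuming plain placement new (the helper name is hypothetical; the
real code would presumably allocate from a compiler arena):

    // Hypothetical helper, not part of the patch: over-allocate so that
    // resproj[] provides 'nbres' slots instead of the one declared slot.
    static CallProjections* make_projections(uint nbres) {
      size_t extra = (nbres > 1) ? (nbres - 1) * sizeof(Node*) : 0;
      void* mem = ::operator new(sizeof(CallProjections) + extra);
      return new (mem) CallProjections(nbres);
    }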
@@ -572,11 +590,11 @@
   float        _cnt;          // Estimate of number of times called
   CallGenerator* _generator;  // corresponding CallGenerator for some late inline calls
   const char *_name;           // Printable name, if _method is NULL
 
   CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type)
-    : SafePointNode(tf->domain()->cnt(), NULL, adr_type),
+    : SafePointNode(tf->domain_cc()->cnt(), NULL, adr_type),
       _tf(tf),
       _entry_point(addr),
       _cnt(COUNT_UNKNOWN),
       _generator(NULL),
       _name(NULL)

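The constructor now sizes the node by domain_cc() rather than domain(): with
scalarized calling conventions, the calling-convention view of the domain can
be wider than the signature view, since a value type argument contributes one
slot per field. An illustration, assuming domain_sig() is the
signature-shaped counterpart (range_sig() appears later in this patch):

    // Illustration only, given: const TypeFunc* tf. For m(MyValue v) where
    // MyValue has an int and a long field, the two domains might compare so:
    const TypeTuple* sig = tf->domain_sig(); // ..., MyValue* (one oop slot)
    const TypeTuple* cc  = tf->domain_cc();  // ..., int, long (slot per field)
    assert(cc->cnt() >= sig->cnt(), "scalarized domain is at least as wide");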
@@ -599,11 +617,11 @@
   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
   virtual Node* Identity(PhaseGVN* phase) { return this; }
   virtual bool        cmp( const Node &n ) const;
   virtual uint        size_of() const = 0;
   virtual void        calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;
-  virtual Node       *match( const ProjNode *proj, const Matcher *m );
+  virtual Node       *match(const ProjNode *proj, const Matcher *m, const RegMask* mask);
   virtual uint        ideal_reg() const { return NotAMachineReg; }
   // Are we guaranteed that this node is a safepoint?  Not true for leaf calls and
   // for some macro nodes whose expansion does not have a safepoint on the fast path.
   virtual bool        guaranteed_safepoint()  { return true; }
   // For macro nodes, the JVMState gets modified during expansion. If calls

@@ -618,25 +636,27 @@
 
   // Returns true if the call may modify n
   virtual bool        may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase);
   // Does this node have a use of n other than in debug information?
   bool                has_non_debug_use(Node *n);
+  // Does this node have a debug use of n?
+  bool                has_debug_use(Node *n);
   // Returns the unique CheckCastPP of a call
  // or the result projection if there are several CheckCastPPs
  // or NULL if there is none.
   Node *result_cast();
  // Does this node return a pointer?
   bool returns_pointer() const {
-    const TypeTuple *r = tf()->range();
-    return (r->cnt() > TypeFunc::Parms &&
+    const TypeTuple *r = tf()->range_sig();
+    return (!tf()->returns_value_type_as_fields() &&
+            r->cnt() > TypeFunc::Parms &&
             r->field_at(TypeFunc::Parms)->isa_ptr());
   }
 
   // Collect all the interesting edges from a call for use in
   // replacing the call by something else.  Used by macro expansion
   // and the late inlining support.
-  void extract_projections(CallProjections* projs, bool separate_io_proj, bool do_asserts = true);
+  CallProjections* extract_projections(bool separate_io_proj, bool do_asserts = true);
 
   virtual uint match_edge(uint idx) const;
 
   bool is_call_to_arraycopystub() const;
 

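Because extract_projections() now allocates and returns the variable-length
CallProjections itself (instead of filling in a caller-owned stack object),
call sites change shape roughly as follows; variable names are illustrative:

    // Before:
    //   CallProjections projs;
    //   call->extract_projections(&projs, separate_io_proj);
    // After:
    CallProjections* projs = call->extract_projections(separate_io_proj);
    for (uint i = 0; i < projs->nb_resproj; i++) {
      Node* res = projs->resproj[i]; // several entries when a value type
                                     // is returned as individual fields
    }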
@@ -704,10 +724,22 @@
     init_class_id(Class_CallStaticJava);
     if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
       init_flags(Flag_is_macro);
       C->add_macro_node(this);
     }
+    const TypeTuple *r = tf->range_sig();
+    if (ValueTypeReturnedAsFields &&
+        method != NULL &&
+        method->is_method_handle_intrinsic() &&
+        r->cnt() > TypeFunc::Parms &&
+        r->field_at(TypeFunc::Parms)->isa_oopptr() &&
+        r->field_at(TypeFunc::Parms)->is_oopptr()->can_be_value_type()) {
+      // Make sure this call is processed by PhaseMacroExpand::expand_mh_intrinsic_return
+      init_flags(Flag_is_macro);
+      C->add_macro_node(this);
+    }
+
     _is_scalar_replaceable = false;
     _is_non_escaping = false;
   }
   CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
                      const TypePtr* adr_type)

@@ -813,10 +845,11 @@
                    const TypePtr* adr_type)
     : CallLeafNode(tf, addr, name, adr_type)
   {
   }
   virtual int   Opcode() const;
+  virtual uint match_edge(uint idx) const;
 };
 
 
 //------------------------------Allocate---------------------------------------
 // High-level memory allocation

@@ -836,19 +869,27 @@
     // Inputs:
     AllocSize   = TypeFunc::Parms,    // size (in bytes) of the new object
     KlassNode,                        // type (maybe dynamic) of the obj.
     InitialTest,                      // slow-path test (may be constant)
     ALength,                          // array length (or TOP if none)
+    ValueNode,                        // value type being allocated (buffered), if any
+    DefaultValue,                     // default value in case of a non-flattened value array
+    RawDefaultValue,                  // same as above, but as a raw machine word
+    StorageProperties,                // storage properties for arrays
     ParmLimit
   };
 
   static const TypeFunc* alloc_type(const Type* t) {
     const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
     fields[AllocSize]   = TypeInt::POS;
     fields[KlassNode]   = TypeInstPtr::NOTNULL;
     fields[InitialTest] = TypeInt::BOOL;
     fields[ALength]     = t;  // length (can be a bad length)
+    fields[ValueNode]   = Type::BOTTOM;
+    fields[DefaultValue] = TypeInstPtr::NOTNULL;
+    fields[RawDefaultValue] = TypeX_X;
+    fields[StorageProperties] = TypeX_X;
 
     const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
 
     // create result type (range)
     fields = TypeTuple::fields(1);

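The four new fixed inputs are wired with init_req() (see the AllocateArrayNode
constructor below) and can be read back off the allocation in the usual way;
a sketch of how expansion code might fetch them:

    // Sketch: retrieving the new inputs from an AllocateNode* alloc.
    Node* value              = alloc->in(AllocateNode::ValueNode);
    Node* default_value     = alloc->in(AllocateNode::DefaultValue);
    Node* raw_default_value = alloc->in(AllocateNode::RawDefaultValue);
    Node* storage_props     = alloc->in(AllocateNode::StorageProperties);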
@@ -862,14 +903,16 @@
   // Result of Escape Analysis
   bool _is_scalar_replaceable;
   bool _is_non_escaping;
  // True when MemBar for new is redundant with MemBar at initializer exit
   bool _is_allocation_MemBar_redundant;
+  bool _larval; // True if this allocates a value object in its larval (still mutable) state
 
   virtual uint size_of() const; // Size is bigger
   AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
-               Node *size, Node *klass_node, Node *initial_test);
+               Node *size, Node *klass_node, Node *initial_test,
+               ValueTypeBaseNode* value_node = NULL);
   // Expansion modifies the JVMState, so we need to clone it
   virtual void  clone_jvms(Compile* C) {
     if (jvms() != NULL) {
       set_jvms(jvms()->clone_deep(C));
       jvms()->set_map_deep(this);

@@ -877,10 +920,12 @@
   }
   virtual int Opcode() const;
   virtual uint ideal_reg() const { return Op_RegP; }
   virtual bool        guaranteed_safepoint()  { return false; }
 
+  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
+
   // allocations do not modify their arguments
   virtual bool        may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
 
   // Pattern-match a possible usage of AllocateNode.
   // Return null if no allocation is recognized.

@@ -934,27 +979,30 @@
  // inserted at the exit of its <init>, the memory barrier for the new object is not necessary.
  // Invoke this method when the MemBar at the exit of the initializer
  // post-dominates the allocation node.
   void compute_MemBar_redundancy(ciMethod* initializer);
   bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }
+
+  Node* make_ideal_mark(PhaseGVN *phase, Node* obj, Node* control, Node* mem, Node* klass_node);
 };
 
 //------------------------------AllocateArray---------------------------------
 //
 // High-level array allocation
 //
 class AllocateArrayNode : public AllocateNode {
 public:
   AllocateArrayNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
                     Node* size, Node* klass_node, Node* initial_test,
-                    Node* count_val
-                    )
-    : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node,
-                   initial_test)
+                    Node* count_val, Node* default_value, Node* raw_default_value, Node* storage_properties)
+    : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node, initial_test)
   {
     init_class_id(Class_AllocateArray);
     set_req(AllocateNode::ALength,        count_val);
+    init_req(AllocateNode::DefaultValue,  default_value);
+    init_req(AllocateNode::RawDefaultValue, raw_default_value);
+    init_req(AllocateNode::StorageProperties, storage_properties);
   }
   virtual int Opcode() const;
   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 
    // Dig the length operand out of an array allocation site.

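A sketch of what the widened AllocateArrayNode constructor implies for a
caller allocating an ordinary (non-value) array: the three new trailing
arguments would be dummy constants. The _gvn helper usage is illustrative,
assuming a 64-bit VM where TypeX_X is a long:

    // Hypothetical call site, not part of the patch: a plain object array
    // passes a null default value and zero raw default/storage words.
    Node* default_value      = _gvn.zerocon(T_OBJECT); // null oop constant
    Node* raw_default_value  = _gvn.longcon(0);        // raw machine word
    Node* storage_properties = _gvn.longcon(0);
    AllocateArrayNode* alloc =
      new AllocateArrayNode(C, atype, ctrl, mem, abio, size, klass_node,
                            initial_test, count_val,
                            default_value, raw_default_value, storage_properties);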
@@ -1070,11 +1118,11 @@
     // create result type (range)
     fields = TypeTuple::fields(0);
 
     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0,fields);
 
-    return TypeFunc::make(domain,range);
+    return TypeFunc::make(domain, range);
   }
 
   virtual int Opcode() const;
   virtual uint size_of() const; // Size is bigger
   LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {