< prev index next >

src/hotspot/share/opto/castnode.cpp

Print this page
@@ -25,12 +25,15 @@
  #include "precompiled.hpp"
  #include "opto/addnode.hpp"
  #include "opto/callnode.hpp"
  #include "opto/castnode.hpp"
  #include "opto/connode.hpp"
+ #include "opto/graphKit.hpp"
+ #include "opto/inlinetypenode.hpp"
  #include "opto/matcher.hpp"
  #include "opto/phaseX.hpp"
+ #include "opto/rootnode.hpp"
  #include "opto/subnode.hpp"
  #include "opto/type.hpp"
  
  //=============================================================================
  // If input is already higher or equal to cast type, then this is an identity.

@@ -76,11 +79,25 @@
  
  //------------------------------Ideal------------------------------------------
  // Return a node which is more "ideal" than the current node.  Strip out
  // control copies
  Node *ConstraintCastNode::Ideal(PhaseGVN *phase, bool can_reshape) {
-   return (in(0) && remove_dead_region(phase, can_reshape)) ? this : NULL;
+   // If we have a control input and the attached region was (partly) dead,
+   // remove_dead_region() already rewired it; return 'this' to signal
+   // progress so IGVN revisits the node.
+   if (in(0) && remove_dead_region(phase, can_reshape)) {
+     return this;
+   }
+ 
+   // Push cast through InlineTypePtrNode
+   // Instead of casting the whole inline-type wrapper, cast only its oop
+   // input and return a clone of the wrapper that carries the casted oop.
+   // Guarded by filter_speculative() so we never build a cast whose joined
+   // type would be TOP (i.e. a provably dead path).
+   InlineTypePtrNode* vt = in(1)->isa_InlineTypePtr();
+   if (vt != NULL && phase->type(vt)->filter_speculative(_type) != Type::TOP) {
+     Node* cast = clone();                 // same cast node, retargeted below
+     cast->set_req(1, vt->get_oop());      // cast the buffered oop instead of the wrapper
+     vt = vt->clone()->as_InlineTypePtr(); // fresh wrapper; original stays untouched for other users
+     vt->set_oop(phase->transform(cast));
+     return vt;
+   }
+ 
+   return NULL;
  }
  
  // GVN equality: two ConstraintCasts are interchangeable only if their types
  // match (TypeNode::cmp) AND they record the same dependency kind — casts
  // with different dependencies must not be commoned.
  bool ConstraintCastNode::cmp(const Node &n) const {
    return TypeNode::cmp(n) && ((ConstraintCastNode&)n)._dependency == _dependency;
  }

@@ -374,10 +391,13 @@
  
  //=============================================================================
  //------------------------------Identity---------------------------------------
  // If input is already higher or equal to cast type, then this is an identity.
  Node* CheckCastPPNode::Identity(PhaseGVN* phase) {
+   if (in(1)->is_InlineTypeBase() && _type->isa_oopptr() && phase->type(in(1))->inline_klass()->is_subtype_of(_type->is_oopptr()->klass())) {
+     return in(1);
+   }
    Node* dom = dominating_cast(phase, phase);
    if (dom != NULL) {
      return dom;
    }
    if (_dependency != RegularDependency) {

@@ -405,12 +425,21 @@
    if( inn == Type::TOP ) return Type::TOP;  // No information yet
  
    const TypePtr *in_type   = inn->isa_ptr();
    const TypePtr *my_type   = _type->isa_ptr();
    const Type *result = _type;
-   if( in_type != NULL && my_type != NULL ) {
-     TypePtr::PTR   in_ptr    = in_type->ptr();
+   if (in_type != NULL && my_type != NULL) {
+     if (!StressReflectiveCode && my_type->isa_aryptr() && in_type->isa_aryptr()) {
+       // Propagate array properties (not flat/null-free)
+       // Don't do this when StressReflectiveCode is enabled because it might lead to
+       // a dying data path while the corresponding flat/null-free check is not folded.
+       my_type = my_type->is_aryptr()->update_properties(in_type->is_aryptr());
+       if (my_type == NULL) {
+         return Type::TOP; // Inconsistent properties
+       }
+     }
+     TypePtr::PTR in_ptr = in_type->ptr();
      if (in_ptr == TypePtr::Null) {
        result = in_type;
      } else if (in_ptr == TypePtr::Constant) {
        if (my_type->isa_rawptr()) {
          result = my_type;

@@ -561,10 +590,26 @@
    if (t == Type::TOP) return Type::TOP;
    if (t->base() == Type::RawPtr && t->singleton()) {
      uintptr_t bits = (uintptr_t) t->is_rawptr()->get_con();
      return TypeX::make(bits);
    }
+ 
+   if (t->is_zero_type() || !t->maybe_null()) {
+     for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
+       Node* u = fast_out(i);
+       if (u->Opcode() == Op_OrL) {
+         for (DUIterator_Fast jmax, j = u->fast_outs(jmax); j < jmax; j++) {
+           Node* cmp = u->fast_out(j);
+           if (cmp->Opcode() == Op_CmpL) {
+             // Give CmpL a chance to get optimized
+             phase->record_for_igvn(cmp);
+           }
+         }
+       }
+     }
+   }
+ 
    return CastP2XNode::bottom_type();
  }
  
  Node *CastP2XNode::Ideal(PhaseGVN *phase, bool can_reshape) {
    return (in(0) && remove_dead_region(phase, can_reshape)) ? this : NULL;
< prev index next >