< prev index next >

src/hotspot/share/opto/memnode.cpp

Print this page

1911   // Steps (a), (b):  Walk past independent stores to find an exact match.
1912   if (prev_mem != nullptr && prev_mem != in(MemNode::Memory)) {
1913     // (c) See if we can fold up on the spot, but don't fold up here.
1914     // Fold-up might require truncation (for LoadB/LoadS/LoadUS) or
1915     // just return a prior value, which is done by Identity calls.
1916     if (can_see_stored_value(prev_mem, phase)) {
1917       // Make ready for step (d):
1918       set_req_X(MemNode::Memory, prev_mem, phase);
1919       return this;
1920     }
1921   }
1922 
1923   return progress ? this : nullptr;
1924 }
1925 
1926 // Helper to recognize certain Klass fields which are invariant across
1927 // some group of array types (e.g., int[] or all T[] where T < Object).
1928 const Type*
1929 LoadNode::load_array_final_field(const TypeKlassPtr *tkls,
1930                                  ciKlass* klass) const {







1931   if (tkls->offset() == in_bytes(Klass::modifier_flags_offset())) {
1932     // The field is Klass::_modifier_flags.  Return its (constant) value.
1933     // (Folds up the 2nd indirection in aClassConstant.getModifiers().)
1934     assert(this->Opcode() == Op_LoadI, "must load an int from _modifier_flags");
1935     return TypeInt::make(klass->modifier_flags());
1936   }
1937   if (tkls->offset() == in_bytes(Klass::access_flags_offset())) {
1938     // The field is Klass::_access_flags.  Return its (constant) value.
1939     // (Folds up the 2nd indirection in Reflection.getClassAccessFlags(aClassConstant).)
1940     assert(this->Opcode() == Op_LoadI, "must load an int from _access_flags");
1941     return TypeInt::make(klass->access_flags());
1942   }
1943   if (tkls->offset() == in_bytes(Klass::layout_helper_offset())) {
1944     // The field is Klass::_layout_helper.  Return its constant value if known.
1945     assert(this->Opcode() == Op_LoadI, "must load an int from _layout_helper");
1946     return TypeInt::make(klass->layout_helper());
1947   }
1948 
1949   // No match.
1950   return nullptr;

2083         assert(adr->Opcode() == Op_LoadP, "must load an oop from _java_mirror");
2084         assert(Opcode() == Op_LoadP, "must load an oop from _java_mirror");
2085         return TypeInstPtr::make(klass->java_mirror());
2086       }
2087     }
2088   }
2089 
2090   const TypeKlassPtr *tkls = tp->isa_klassptr();
2091   if (tkls != nullptr) {
2092     if (tkls->is_loaded() && tkls->klass_is_exact()) {
2093       ciKlass* klass = tkls->exact_klass();
2094       // We are loading a field from a Klass metaobject whose identity
2095       // is known at compile time (the type is "exact" or "precise").
2096       // Check for fields we know are maintained as constants by the VM.
2097       if (tkls->offset() == in_bytes(Klass::super_check_offset_offset())) {
2098         // The field is Klass::_super_check_offset.  Return its (constant) value.
2099         // (Folds up type checking code.)
2100         assert(Opcode() == Op_LoadI, "must load an int from _super_check_offset");
2101         return TypeInt::make(klass->super_check_offset());
2102       }







2103       // Compute index into primary_supers array
2104       juint depth = (tkls->offset() - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*);
2105       // Check for overflowing; use unsigned compare to handle the negative case.
2106       if( depth < ciKlass::primary_super_limit() ) {
2107         // The field is an element of Klass::_primary_supers.  Return its (constant) value.
2108         // (Folds up type checking code.)
2109         assert(Opcode() == Op_LoadKlass, "must load a klass from _primary_supers");
2110         ciKlass *ss = klass->super_of_depth(depth);
2111         return ss ? TypeKlassPtr::make(ss, Type::trust_interfaces) : TypePtr::NULL_PTR;
2112       }
2113       const Type* aift = load_array_final_field(tkls, klass);
2114       if (aift != nullptr)  return aift;
2115     }
2116 
2117     // We can still check if we are loading from the primary_supers array at a
2118     // shallow enough depth.  Even though the klass is not exact, entries less
2119     // than or equal to its super depth are correct.
2120     if (tkls->is_loaded()) {
2121       ciKlass* klass = nullptr;
2122       if (tkls->isa_instklassptr()) {

2173     if (value != nullptr && value->is_Con()) {
2174       assert(value->bottom_type()->higher_equal(_type),"sanity");
2175       return value->bottom_type();
2176     }
2177   }
2178 
2179   bool is_vect = (_type->isa_vect() != nullptr);
2180   if (is_instance && !is_vect) {
2181     // If we have an instance type and our memory input is the
2182     // program's initial memory state, there is no matching store,
2183     // so just return a zero of the appropriate type -
2184     // except if it is vectorized - then we have no zero constant.
2185     Node *mem = in(MemNode::Memory);
2186     if (mem->is_Parm() && mem->in(0)->is_Start()) {
2187       assert(mem->as_Parm()->_con == TypeFunc::Memory, "must be memory Parm");
2188       return Type::get_zero_type(_type->basic_type());
2189     }
2190   }
2191 
2192   Node* alloc = is_new_object_mark_load();
2193   if (alloc != nullptr) {
2194     return TypeX::make(markWord::prototype().value());
2195   }
2196 
2197   return _type;
2198 }
2199 
2200 //------------------------------match_edge-------------------------------------
2201 // Do we Match on this edge index or not?  Match only the address.
2202 // Returns nonzero only for the Address input, so instruction selection
2203 // considers just the address subtree (not control/memory) when matching
2202 uint LoadNode::match_edge(uint idx) const {
2203   return idx == MemNode::Address;
2204 }
2205 
2206 //--------------------------LoadBNode::Ideal--------------------------------------
2207 //
2208 //  If the previous store is to the same address as this load,
2209 //  and the value stored was larger than a byte, replace this load
2210 //  with the value stored truncated to a byte.  If no truncation is
2211 //  needed, the replacement is done in LoadNode::Identity().
2212 //
2213 Node* LoadBNode::Ideal(PhaseGVN* phase, bool can_reshape) {

1911   // Steps (a), (b):  Walk past independent stores to find an exact match.
1912   if (prev_mem != nullptr && prev_mem != in(MemNode::Memory)) {
1913     // (c) See if we can fold up on the spot, but don't fold up here.
1914     // Fold-up might require truncation (for LoadB/LoadS/LoadUS) or
1915     // just return a prior value, which is done by Identity calls.
1916     if (can_see_stored_value(prev_mem, phase)) {
1917       // Make ready for step (d):
1918       set_req_X(MemNode::Memory, prev_mem, phase);
1919       return this;
1920     }
1921   }
1922 
1923   return progress ? this : nullptr;
1924 }
1925 
1926 // Helper to recognize certain Klass fields which are invariant across
1927 // some group of array types (e.g., int[] or all T[] where T < Object).
1928 const Type*
1929 LoadNode::load_array_final_field(const TypeKlassPtr *tkls,
1930                                  ciKlass* klass) const {
1931   if (UseCompactObjectHeaders) {
1932     if (tkls->offset() == in_bytes(Klass::prototype_header_offset())) {
1933       // The field is Klass::_prototype_header.  Return its (constant) value.
1934       assert(this->Opcode() == Op_LoadX, "must load a proper type from _prototype_header");
1935       return TypeX::make(klass->prototype_header());
1936     }
1937   }
1938   if (tkls->offset() == in_bytes(Klass::modifier_flags_offset())) {
1939     // The field is Klass::_modifier_flags.  Return its (constant) value.
1940     // (Folds up the 2nd indirection in aClassConstant.getModifiers().)
1941     assert(this->Opcode() == Op_LoadI, "must load an int from _modifier_flags");
1942     return TypeInt::make(klass->modifier_flags());
1943   }
1944   if (tkls->offset() == in_bytes(Klass::access_flags_offset())) {
1945     // The field is Klass::_access_flags.  Return its (constant) value.
1946     // (Folds up the 2nd indirection in Reflection.getClassAccessFlags(aClassConstant).)
1947     assert(this->Opcode() == Op_LoadI, "must load an int from _access_flags");
1948     return TypeInt::make(klass->access_flags());
1949   }
1950   if (tkls->offset() == in_bytes(Klass::layout_helper_offset())) {
1951     // The field is Klass::_layout_helper.  Return its constant value if known.
1952     assert(this->Opcode() == Op_LoadI, "must load an int from _layout_helper");
1953     return TypeInt::make(klass->layout_helper());
1954   }
1955 
1956   // No match.
1957   return nullptr;

2090         assert(adr->Opcode() == Op_LoadP, "must load an oop from _java_mirror");
2091         assert(Opcode() == Op_LoadP, "must load an oop from _java_mirror");
2092         return TypeInstPtr::make(klass->java_mirror());
2093       }
2094     }
2095   }
2096 
2097   const TypeKlassPtr *tkls = tp->isa_klassptr();
2098   if (tkls != nullptr) {
2099     if (tkls->is_loaded() && tkls->klass_is_exact()) {
2100       ciKlass* klass = tkls->exact_klass();
2101       // We are loading a field from a Klass metaobject whose identity
2102       // is known at compile time (the type is "exact" or "precise").
2103       // Check for fields we know are maintained as constants by the VM.
2104       if (tkls->offset() == in_bytes(Klass::super_check_offset_offset())) {
2105         // The field is Klass::_super_check_offset.  Return its (constant) value.
2106         // (Folds up type checking code.)
2107         assert(Opcode() == Op_LoadI, "must load an int from _super_check_offset");
2108         return TypeInt::make(klass->super_check_offset());
2109       }
2110       if (UseCompactObjectHeaders) {
2111         if (tkls->offset() == in_bytes(Klass::prototype_header_offset())) {
2112           // The field is Klass::_prototype_header. Return its (constant) value.
2113           assert(this->Opcode() == Op_LoadX, "must load a proper type from _prototype_header");
2114           return TypeX::make(klass->prototype_header());
2115         }
2116       }
2117       // Compute index into primary_supers array
2118       juint depth = (tkls->offset() - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*);
2119       // Check for overflowing; use unsigned compare to handle the negative case.
2120       if( depth < ciKlass::primary_super_limit() ) {
2121         // The field is an element of Klass::_primary_supers.  Return its (constant) value.
2122         // (Folds up type checking code.)
2123         assert(Opcode() == Op_LoadKlass, "must load a klass from _primary_supers");
2124         ciKlass *ss = klass->super_of_depth(depth);
2125         return ss ? TypeKlassPtr::make(ss, Type::trust_interfaces) : TypePtr::NULL_PTR;
2126       }
2127       const Type* aift = load_array_final_field(tkls, klass);
2128       if (aift != nullptr)  return aift;
2129     }
2130 
2131     // We can still check if we are loading from the primary_supers array at a
2132     // shallow enough depth.  Even though the klass is not exact, entries less
2133     // than or equal to its super depth are correct.
2134     if (tkls->is_loaded()) {
2135       ciKlass* klass = nullptr;
2136       if (tkls->isa_instklassptr()) {

2187     if (value != nullptr && value->is_Con()) {
2188       assert(value->bottom_type()->higher_equal(_type),"sanity");
2189       return value->bottom_type();
2190     }
2191   }
2192 
2193   bool is_vect = (_type->isa_vect() != nullptr);
2194   if (is_instance && !is_vect) {
2195     // If we have an instance type and our memory input is the
2196     // program's initial memory state, there is no matching store,
2197     // so just return a zero of the appropriate type -
2198     // except if it is vectorized - then we have no zero constant.
2199     Node *mem = in(MemNode::Memory);
2200     if (mem->is_Parm() && mem->in(0)->is_Start()) {
2201       assert(mem->as_Parm()->_con == TypeFunc::Memory, "must be memory Parm");
2202       return Type::get_zero_type(_type->basic_type());
2203     }
2204   }
2205 
2206   Node* alloc = is_new_object_mark_load();
2207   if (!UseCompactObjectHeaders && alloc != nullptr) {
2208     return TypeX::make(markWord::prototype().value());
2209   }
2210 
2211   return _type;
2212 }
2213 
2214 //------------------------------match_edge-------------------------------------
2215 // Do we Match on this edge index or not?  Match only the address.
2216 // Returns nonzero only for the Address input, so instruction selection
2217 // considers just the address subtree (not control/memory) when matching
2216 uint LoadNode::match_edge(uint idx) const {
2217   return idx == MemNode::Address;
2218 }
2219 
2220 //--------------------------LoadBNode::Ideal--------------------------------------
2221 //
2222 //  If the previous store is to the same address as this load,
2223 //  and the value stored was larger than a byte, replace this load
2224 //  with the value stored truncated to a byte.  If no truncation is
2225 //  needed, the replacement is done in LoadNode::Identity().
2226 //
2227 Node* LoadBNode::Ideal(PhaseGVN* phase, bool can_reshape) {
< prev index next >