  // Steps (a), (b): Walk past independent stores to find an exact match.
  if (prev_mem != nullptr && prev_mem != in(MemNode::Memory)) {
    // (c) See if we can fold up on the spot, but don't fold up here.
    // Fold-up might require truncation (for LoadB/LoadS/LoadUS) or
    // just return a prior value, which is done by Identity calls.
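    // For example (a sketch): a LoadB whose memory chain is
    // StoreI(StoreB(mem, adr, v), other_adr, w) can walk past the
    // independent StoreI in steps (a), (b) and find the StoreB. Here we
    // only rewire the memory edge to that store; the value fold-up itself
    // happens later, in LoadNode::Identity or, when truncation is needed,
    // in the narrow load's Ideal (e.g. LoadBNode::Ideal).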
    if (can_see_stored_value(prev_mem, phase)) {
      // Make ready for step (d):
      set_req_X(MemNode::Memory, prev_mem, phase);
      return this;
    }
  }

  return progress ? this : nullptr;
}

// Helper to recognize certain Klass fields which are invariant across
// some group of array types (e.g., int[] or all T[] where T < Object).
const Type*
LoadNode::load_array_final_field(const TypeKlassPtr* tkls,
                                 ciKlass* klass) const {
  if (UseCompactObjectHeaders) {
    if (tkls->offset() == in_bytes(Klass::prototype_header_offset())) {
      // The field is Klass::_prototype_header. Return its (constant) value.
      assert(this->Opcode() == Op_LoadX, "must load a proper type from _prototype_header");
      return TypeX::make(klass->prototype_header());
    }
  }
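  // (A note on the fold above: with compact object headers the prototype
  // mark word embeds the klass information, making it a per-klass constant,
  // which is what allows the load to fold to a constant here.)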
  if (tkls->offset() == in_bytes(Klass::modifier_flags_offset())) {
    // The field is Klass::_modifier_flags. Return its (constant) value.
    // (Folds up the 2nd indirection in aClassConstant.getModifiers().)
    assert(this->Opcode() == Op_LoadI, "must load an int from _modifier_flags");
    return TypeInt::make(klass->modifier_flags());
  }
  if (tkls->offset() == in_bytes(Klass::access_flags_offset())) {
    // The field is Klass::_access_flags. Return its (constant) value.
    // (Folds up the 2nd indirection in Reflection.getClassAccessFlags(aClassConstant).)
    assert(this->Opcode() == Op_LoadI, "must load an int from _access_flags");
    return TypeInt::make(klass->access_flags());
  }
  if (tkls->offset() == in_bytes(Klass::layout_helper_offset())) {
    // The field is Klass::_layout_helper. Return its constant value if known.
    assert(this->Opcode() == Op_LoadI, "must load an int from _layout_helper");
    return TypeInt::make(klass->layout_helper());
  }

  // No match.
  return nullptr;
}

// ...

        assert(adr->Opcode() == Op_LoadP, "must load an oop from _java_mirror");
        assert(Opcode() == Op_LoadP, "must load an oop from _java_mirror");
        return TypeInstPtr::make(klass->java_mirror());
      }
    }
  }

  const TypeKlassPtr* tkls = tp->isa_klassptr();
  if (tkls != nullptr) {
    if (tkls->is_loaded() && tkls->klass_is_exact()) {
      ciKlass* klass = tkls->exact_klass();
      // We are loading a field from a Klass metaobject whose identity
      // is known at compile time (the type is "exact" or "precise").
      // Check for fields we know are maintained as constants by the VM.
      if (tkls->offset() == in_bytes(Klass::super_check_offset_offset())) {
        // The field is Klass::_super_check_offset. Return its (constant) value.
        // (Folds up type checking code.)
        assert(Opcode() == Op_LoadI, "must load an int from _super_check_offset");
        return TypeInt::make(klass->super_check_offset());
      }
      if (UseCompactObjectHeaders) {
        if (tkls->offset() == in_bytes(Klass::prototype_header_offset())) {
          // The field is Klass::_prototype_header. Return its (constant) value.
          assert(this->Opcode() == Op_LoadX, "must load a proper type from _prototype_header");
          return TypeX::make(klass->prototype_header());
        }
      }
      // Compute the index into the primary_supers array.
      juint depth = (tkls->offset() - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*);
      // Check for overflow; use an unsigned compare to handle the negative case.
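      // (E.g., if tkls->offset() is below primary_supers_offset(), the
      // subtraction wraps around to a huge juint and the bounds check
      // below rejects it.)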
      if (depth < ciKlass::primary_super_limit()) {
        // The field is an element of Klass::_primary_supers. Return its (constant) value.
        // (Folds up type checking code.)
        assert(Opcode() == Op_LoadKlass, "must load a klass from _primary_supers");
        ciKlass* ss = klass->super_of_depth(depth);
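        // (E.g., for an exact klass C extends B extends Object,
        // _primary_supers is [Object, B, C], so a load at depth 1 folds
        // to B's klass constant.)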
        return ss ? TypeKlassPtr::make(ss, Type::trust_interfaces) : TypePtr::NULL_PTR;
      }
      const Type* aift = load_array_final_field(tkls, klass);
      if (aift != nullptr) return aift;
    }

    // We can still check if we are loading from the primary_supers array at a
    // shallow enough depth. Even though the klass is not exact, entries less
    // than or equal to its super depth are correct.
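    // (E.g., every subtype of B shares B's primary-super entries up to
    // B's depth, so a load at such a depth still folds to a constant.)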
    if (tkls->is_loaded()) {
      ciKlass* klass = nullptr;
      if (tkls->isa_instklassptr()) {

// ...

    if (value != nullptr && value->is_Con()) {
      assert(value->bottom_type()->higher_equal(_type), "sanity");
      return value->bottom_type();
    }
  }

  bool is_vect = (_type->isa_vect() != nullptr);
  if (is_instance && !is_vect) {
    // If we have an instance type and our memory input is the program's
    // initial memory state, there is no matching store, so just return a
    // zero of the appropriate type. (Vector loads are excluded above,
    // since there is no vector zero constant to return.)
    Node* mem = in(MemNode::Memory);
    if (mem->is_Parm() && mem->in(0)->is_Start()) {
      assert(mem->as_Parm()->_con == TypeFunc::Memory, "must be memory Parm");
      return Type::get_zero_type(_type->basic_type());
    }
  }
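  // (E.g., a load from a known, unaliased instance whose memory input is
  // still the initial memory Parm has seen no store on its slice, so the
  // field can only hold its default zero value.)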

  Node* alloc = is_new_object_mark_load();
  if (!UseCompactObjectHeaders && alloc != nullptr) {
    return TypeX::make(markWord::prototype().value());
  }
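  // (Under UseCompactObjectHeaders the mark word of a new object is the
  // klass-specific prototype rather than the global markWord::prototype(),
  // so this fold would be wrong there; the Klass::_prototype_header cases
  // above handle that configuration.)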

  return _type;
}

//------------------------------match_edge-------------------------------------
// Do we Match on this edge index or not? Match only the address.
uint LoadNode::match_edge(uint idx) const {
  return idx == MemNode::Address;
}

//--------------------------LoadBNode::Ideal--------------------------------------
//
// If the previous store is to the same address as this load,
// and the value stored was larger than a byte, replace this load
// with the value stored truncated to a byte. If no truncation is
// needed, the replacement is done in LoadNode::Identity().
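// For example, a LoadB that sees a StoreI of value v to the same address
// can be replaced (as a sketch) by (v << 24) >> 24, keeping only the
// sign-extended low byte of v.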
//
Node* LoadBNode::Ideal(PhaseGVN* phase, bool can_reshape) {