1835 set_req_X(MemNode::Memory, prev_mem, phase);
1836 return this;
1837 }
1838 }
1839
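// Mark-word load from a freshly allocated object: with biased locking the
// initial mark comes from the klass' prototype header, so let the allocation
// rebuild this load via make_ideal_mark() under its Initialize control
// projection.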
1840 AllocateNode* alloc = is_new_object_mark_load(phase);
1841 if (alloc != nullptr && alloc->Opcode() == Op_Allocate && UseBiasedLocking) {
1842 InitializeNode* init = alloc->initialization();
1843 Node* control = init->proj_out(0);
1844 return alloc->make_ideal_mark(phase, address, control, mem);
1845 }
1846
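// Returning 'this' tells the caller the node was modified in place; nullptr means no progress.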
1847 return progress ? this : nullptr;
1848 }
1849
1850 // Helper to recognize certain Klass fields which are invariant across
1851 // some group of array types (e.g., int[] or all T[] where T < Object).
1852 const Type*
1853 LoadNode::load_array_final_field(const TypeKlassPtr *tkls,
1854 ciKlass* klass) const {
1855 if (tkls->offset() == in_bytes(Klass::modifier_flags_offset())) {
1856 // The field is Klass::_modifier_flags. Return its (constant) value.
1857 // (Folds up the 2nd indirection in aClassConstant.getModifiers().)
1858 assert(this->Opcode() == Op_LoadI, "must load an int from _modifier_flags");
1859 return TypeInt::make(klass->modifier_flags());
1860 }
1861 if (tkls->offset() == in_bytes(Klass::access_flags_offset())) {
1862 // The field is Klass::_access_flags. Return its (constant) value.
1863 // (Folds up the 2nd indirection in Reflection.getClassAccessFlags(aClassConstant).)
1864 assert(this->Opcode() == Op_LoadI, "must load an int from _access_flags");
1865 return TypeInt::make(klass->access_flags());
1866 }
1867 if (tkls->offset() == in_bytes(Klass::layout_helper_offset())) {
1868 // The field is Klass::_layout_helper. Return its constant value if known.
1869 assert(this->Opcode() == Op_LoadI, "must load an int from _layout_helper");
1870 return TypeInt::make(klass->layout_helper());
1871 }
1872
1873 // No match.
1874 return nullptr;
2005 assert(adr->Opcode() == Op_LoadP, "must load an oop from _java_mirror");
2006 assert(Opcode() == Op_LoadP, "must load an oop from _java_mirror");
2007 return TypeInstPtr::make(klass->java_mirror());
2008 }
2009 }
2010 }
2011
2012 const TypeKlassPtr *tkls = tp->isa_klassptr();
2013 if (tkls != nullptr && !StressReflectiveCode) {
2014 ciKlass* klass = tkls->klass();
2015 if (klass->is_loaded() && tkls->klass_is_exact()) {
2016 // We are loading a field from a Klass metaobject whose identity
2017 // is known at compile time (the type is "exact" or "precise").
2018 // Check for fields we know are maintained as constants by the VM.
2019 if (tkls->offset() == in_bytes(Klass::super_check_offset_offset())) {
2020 // The field is Klass::_super_check_offset. Return its (constant) value.
2021 // (Folds up type checking code.)
2022 assert(Opcode() == Op_LoadI, "must load an int from _super_check_offset");
2023 return TypeInt::make(klass->super_check_offset());
2024 }
2025 // Compute index into primary_supers array
2026 juint depth = (tkls->offset() - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*);
2027 // Check for overflowing; use unsigned compare to handle the negative case.
2028 if( depth < ciKlass::primary_super_limit() ) {
2029 // The field is an element of Klass::_primary_supers. Return its (constant) value.
2030 // (Folds up type checking code.)
2031 assert(Opcode() == Op_LoadKlass, "must load a klass from _primary_supers");
2032 ciKlass *ss = klass->super_of_depth(depth);
2033 return ss ? TypeKlassPtr::make(ss) : TypePtr::NULL_PTR;
2034 }
2035 const Type* aift = load_array_final_field(tkls, klass);
2036 if (aift != nullptr) return aift;
2037 }
2038
2039 // We can still check if we are loading from the primary_supers array at a
2040 // shallow enough depth. Even though the klass is not exact, entries less
2041 // than or equal to its super depth are correct.
2042 if (klass->is_loaded()) {
2043 ciType *inner = klass;
2044 while( inner->is_obj_array_klass() )
2089 if (value != nullptr && value->is_Con()) {
2090 assert(value->bottom_type()->higher_equal(_type),"sanity");
2091 return value->bottom_type();
2092 }
2093 }
2094
2095 bool is_vect = (_type->isa_vect() != nullptr);
2096 if (is_instance && !is_vect) {
2097 // If we have an instance type and our memory input is the
2098 // program's initial memory state, there is no matching store,
2099 // so just return a zero of the appropriate type -
2100 // except if it is vectorized - then we have no zero constant.
2101 Node *mem = in(MemNode::Memory);
2102 if (mem->is_Parm() && mem->in(0)->is_Start()) {
2103 assert(mem->as_Parm()->_con == TypeFunc::Memory, "must be memory Parm");
2104 return Type::get_zero_type(_type->basic_type());
2105 }
2106 }
2107
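// A mark-word load from a freshly allocated object folds to the constant
// prototype mark, unless biased locking may have installed a klass-specific
// header (that case is rewritten in Ideal() instead).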
2108 Node* alloc = is_new_object_mark_load(phase);
2109 if (alloc != nullptr && !(alloc->Opcode() == Op_Allocate && UseBiasedLocking)) {
2110 return TypeX::make(markWord::prototype().value());
2111 }
2112
2113 return _type;
2114 }
2115
2116 //------------------------------match_edge-------------------------------------
2117 // Do we Match on this edge index or not? Match only the address.
2118 uint LoadNode::match_edge(uint idx) const {
2119 return idx == MemNode::Address;
2120 }
2121
2122 //--------------------------LoadBNode::Ideal--------------------------------------
2123 //
2124 // If the previous store is to the same address as this load,
2125 // and the value stored was larger than a byte, replace this load
2126 // with the value stored truncated to a byte. If no truncation is
2127 // needed, the replacement is done in LoadNode::Identity().
2128 //
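// For a signed byte, the truncation is semantically (v << 24) >> 24 on the stored int v.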
2129 Node* LoadBNode::Ideal(PhaseGVN* phase, bool can_reshape) {
|
1835 set_req_X(MemNode::Memory, prev_mem, phase);
1836 return this;
1837 }
1838 }
1839
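// Mark-word load from a freshly allocated object: with biased locking the
// initial mark comes from the klass' prototype header, so let the allocation
// rebuild this load via make_ideal_mark() under its Initialize control
// projection.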
1840 AllocateNode* alloc = is_new_object_mark_load(phase);
1841 if (alloc != nullptr && alloc->Opcode() == Op_Allocate && UseBiasedLocking) {
1842 InitializeNode* init = alloc->initialization();
1843 Node* control = init->proj_out(0);
1844 return alloc->make_ideal_mark(phase, address, control, mem);
1845 }
1846
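// Returning 'this' tells the caller the node was modified in place; nullptr means no progress.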
1847 return progress ? this : nullptr;
1848 }
1849
1850 // Helper to recognize certain Klass fields which are invariant across
1851 // some group of array types (e.g., int[] or all T[] where T < Object).
1852 const Type*
1853 LoadNode::load_array_final_field(const TypeKlassPtr *tkls,
1854 ciKlass* klass) const {
1855 if (UseCompactObjectHeaders) {
1856 if (tkls->offset() == in_bytes(Klass::prototype_header_offset())) {
1857 // The field is Klass::_prototype_header. Return its (constant) value.
1858 assert(this->Opcode() == Op_LoadX, "must load a proper type from _prototype_header");
1859 return TypeX::make(klass->prototype_header());
1860 }
1861 }
1862 if (tkls->offset() == in_bytes(Klass::modifier_flags_offset())) {
1863 // The field is Klass::_modifier_flags. Return its (constant) value.
1864 // (Folds up the 2nd indirection in aClassConstant.getModifiers().)
1865 assert(this->Opcode() == Op_LoadI, "must load an int from _modifier_flags");
1866 return TypeInt::make(klass->modifier_flags());
1867 }
1868 if (tkls->offset() == in_bytes(Klass::access_flags_offset())) {
1869 // The field is Klass::_access_flags. Return its (constant) value.
1870 // (Folds up the 2nd indirection in Reflection.getClassAccessFlags(aClassConstant).)
1871 assert(this->Opcode() == Op_LoadI, "must load an int from _access_flags");
1872 return TypeInt::make(klass->access_flags());
1873 }
1874 if (tkls->offset() == in_bytes(Klass::layout_helper_offset())) {
1875 // The field is Klass::_layout_helper. Return its constant value if known.
1876 assert(this->Opcode() == Op_LoadI, "must load an int from _layout_helper");
1877 return TypeInt::make(klass->layout_helper());
1878 }
1879
1880 // No match.
1881 return nullptr;
2012 assert(adr->Opcode() == Op_LoadP, "must load an oop from _java_mirror");
2013 assert(Opcode() == Op_LoadP, "must load an oop from _java_mirror");
2014 return TypeInstPtr::make(klass->java_mirror());
2015 }
2016 }
2017 }
2018
2019 const TypeKlassPtr *tkls = tp->isa_klassptr();
2020 if (tkls != nullptr && !StressReflectiveCode) {
2021 ciKlass* klass = tkls->klass();
2022 if (klass->is_loaded() && tkls->klass_is_exact()) {
2023 // We are loading a field from a Klass metaobject whose identity
2024 // is known at compile time (the type is "exact" or "precise").
2025 // Check for fields we know are maintained as constants by the VM.
2026 if (tkls->offset() == in_bytes(Klass::super_check_offset_offset())) {
2027 // The field is Klass::_super_check_offset. Return its (constant) value.
2028 // (Folds up type checking code.)
2029 assert(Opcode() == Op_LoadI, "must load an int from _super_check_offset");
2030 return TypeInt::make(klass->super_check_offset());
2031 }
2032 if (UseCompactObjectHeaders) {
2033 if (tkls->offset() == in_bytes(Klass::prototype_header_offset())) {
2034 // The field is Klass::_prototype_header. Return its (constant) value.
2035 assert(this->Opcode() == Op_LoadX, "must load a proper type from _prototype_header");
2036 return TypeX::make(klass->prototype_header());
2037 }
2038 }
2039 // Compute index into primary_supers array
2040 juint depth = (tkls->offset() - in_bytes(Klass::primary_supers_offset())) / sizeof(Klass*);
2041 // Check for overflowing; use unsigned compare to handle the negative case.
2042 if( depth < ciKlass::primary_super_limit() ) {
2043 // The field is an element of Klass::_primary_supers. Return its (constant) value.
2044 // (Folds up type checking code.)
2045 assert(Opcode() == Op_LoadKlass, "must load a klass from _primary_supers");
2046 ciKlass *ss = klass->super_of_depth(depth);
2047 return ss ? TypeKlassPtr::make(ss) : TypePtr::NULL_PTR;
2048 }
2049 const Type* aift = load_array_final_field(tkls, klass);
2050 if (aift != nullptr) return aift;
2051 }
2052
2053 // We can still check if we are loading from the primary_supers array at a
2054 // shallow enough depth. Even though the klass is not exact, entries less
2055 // than or equal to its super depth are correct.
2056 if (klass->is_loaded()) {
2057 ciType *inner = klass;
2058 while( inner->is_obj_array_klass() )
2103 if (value != nullptr && value->is_Con()) {
2104 assert(value->bottom_type()->higher_equal(_type),"sanity");
2105 return value->bottom_type();
2106 }
2107 }
2108
2109 bool is_vect = (_type->isa_vect() != nullptr);
2110 if (is_instance && !is_vect) {
2111 // If we have an instance type and our memory input is the
2112 // program's initial memory state, there is no matching store,
2113 // so just return a zero of the appropriate type -
2114 // except if it is vectorized - then we have no zero constant.
2115 Node *mem = in(MemNode::Memory);
2116 if (mem->is_Parm() && mem->in(0)->is_Start()) {
2117 assert(mem->as_Parm()->_con == TypeFunc::Memory, "must be memory Parm");
2118 return Type::get_zero_type(_type->basic_type());
2119 }
2120 }
2121
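// A mark-word load from a freshly allocated object folds to the constant
// prototype mark, unless biased locking or compact object headers make the
// initial mark klass-dependent, in which case it is not a single universal
// constant.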
2122 Node* alloc = is_new_object_mark_load(phase);
2123 if (alloc != nullptr && !(alloc->Opcode() == Op_Allocate && UseBiasedLocking) && !UseCompactObjectHeaders) {
2124 return TypeX::make(markWord::prototype().value());
2125 }
2126
2127 return _type;
2128 }
2129
2130 //------------------------------match_edge-------------------------------------
2131 // Do we Match on this edge index or not? Match only the address.
2132 uint LoadNode::match_edge(uint idx) const {
2133 return idx == MemNode::Address;
2134 }
2135
2136 //--------------------------LoadBNode::Ideal--------------------------------------
2137 //
2138 // If the previous store is to the same address as this load,
2139 // and the value stored was larger than a byte, replace this load
2140 // with the value stored truncated to a byte. If no truncation is
2141 // needed, the replacement is done in LoadNode::Identity().
2142 //
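// For a signed byte, the truncation is semantically (v << 24) >> 24 on the stored int v.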
2143 Node* LoadBNode::Ideal(PhaseGVN* phase, bool can_reshape) {
|