/*
 * Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OOPS_INSTANCEKLASS_INLINE_HPP
#define SHARE_OOPS_INSTANCEKLASS_INLINE_HPP

#include "oops/instanceKlass.hpp"

#include "classfile/javaClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "memory/resourceArea.hpp"
#include "oops/klass.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "utilities/debug.hpp"
#include "utilities/devirtualizer.inline.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"

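// The accessors below address data that is allocated directly after the fixed
// part of an InstanceKlass. Based on the computations they perform, the
// embedded layout is:
//
//   [Java vtable               ]  vtable_length() words
//   [itable                    ]  itable_length() words
//   [nonstatic oop map blocks  ]  nonstatic_oop_map_count() entries
//   [implementor               ]  interfaces only
//   [inline type field klasses ]  only if has_inline_type_fields()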
inline intptr_t* InstanceKlass::start_of_itable() const { return (intptr_t*)start_of_vtable() + vtable_length(); }
inline intptr_t* InstanceKlass::end_of_itable() const { return start_of_itable() + itable_length(); }

inline int InstanceKlass::itable_offset_in_words() const { return start_of_itable() - (intptr_t*)this; }

inline oop InstanceKlass::static_field_base_raw() { return java_mirror(); }

inline OopMapBlock* InstanceKlass::start_of_nonstatic_oop_maps() const {
  return (OopMapBlock*)(start_of_itable() + itable_length());
}

inline Klass** InstanceKlass::end_of_nonstatic_oop_maps() const {
  return (Klass**)(start_of_nonstatic_oop_maps() +
                   nonstatic_oop_map_count());
}

inline InstanceKlass* volatile* InstanceKlass::adr_implementor() const {
  if (is_interface()) {
    return (InstanceKlass* volatile*)end_of_nonstatic_oop_maps();
  } else {
    return NULL;
  }
}

inline address InstanceKlass::adr_inline_type_field_klasses() const {
  if (has_inline_type_fields()) {
    InstanceKlass* volatile* adr_impl = adr_implementor();
    if (adr_impl != NULL) {
      return (address)(adr_impl + 1);
    }

    return (address)end_of_nonstatic_oop_maps();
  } else {
    return NULL;
  }
}

inline Klass* InstanceKlass::get_inline_type_field_klass(int idx) const {
  assert(has_inline_type_fields(), "Sanity checking");
  assert(idx < java_fields_count(), "IOOB");
  Klass* k = ((Klass**)adr_inline_type_field_klasses())[idx];
  assert(k != NULL, "Should always be set before being read");
  assert(k->is_inline_klass(), "Must be an inline type");
  return k;
}

inline Klass* InstanceKlass::get_inline_type_field_klass_or_null(int idx) const {
  assert(has_inline_type_fields(), "Sanity checking");
  assert(idx < java_fields_count(), "IOOB");
  Klass* k = ((Klass**)adr_inline_type_field_klasses())[idx];
  assert(k == NULL || k->is_inline_klass(), "Must be an inline type");
  return k;
}

inline void InstanceKlass::set_inline_type_field_klass(int idx, Klass* k) {
  assert(has_inline_type_fields(), "Sanity checking");
  assert(idx < java_fields_count(), "IOOB");
  assert(k != NULL, "Should not be set to NULL");
  assert(((Klass**)adr_inline_type_field_klasses())[idx] == NULL, "Should not be set twice");
  ((Klass**)adr_inline_type_field_klasses())[idx] = k;
}

inline void InstanceKlass::reset_inline_type_field_klass(int idx) {
  assert(has_inline_type_fields(), "Sanity checking");
  assert(idx < java_fields_count(), "IOOB");
  ((Klass**)adr_inline_type_field_klasses())[idx] = NULL;
}

inline ArrayKlass* InstanceKlass::array_klasses_acquire() const {
  return Atomic::load_acquire(&_array_klasses);
}

inline void InstanceKlass::release_set_array_klasses(ArrayKlass* k) {
  Atomic::release_store(&_array_klasses, k);
}

inline jmethodID* InstanceKlass::methods_jmethod_ids_acquire() const {
  return Atomic::load_acquire(&_methods_jmethod_ids);
}

inline void InstanceKlass::release_set_methods_jmethod_ids(jmethodID* jmeths) {
  Atomic::release_store(&_methods_jmethod_ids, jmeths);
}

// The iteration over the oops in objects is a hot path in the GC code.
// By force inlining the following functions, we get similar GC performance
// as the previous macro based implementation.

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_map(OopMapBlock* map, oop obj, OopClosureType* closure) {
  T* p = obj->field_addr<T>(map->offset());
  T* const end = p + map->count();

  for (; p < end; ++p) {
    Devirtualizer::do_oop(closure, p);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_map_reverse(OopMapBlock* map, oop obj, OopClosureType* closure) {
  T* const start = obj->field_addr<T>(map->offset());
  T* p = start + map->count();

  while (start < p) {
    --p;
    Devirtualizer::do_oop(closure, p);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_map_bounded(OopMapBlock* map, oop obj, OopClosureType* closure, MemRegion mr) {
  T* p = obj->field_addr<T>(map->offset());
  T* end = p + map->count();

  T* const l = (T*)mr.start();
  T* const h = (T*)mr.end();
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 &&
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,
         "bounded region must be properly aligned");

  if (p < l) {
    p = l;
  }
  if (end > h) {
    end = h;
  }

  for (; p < end; ++p) {
    Devirtualizer::do_oop(closure, p);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_maps(oop obj, OopClosureType* closure) {
  OopMapBlock* map = start_of_nonstatic_oop_maps();
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();

  for (; map < end_map; ++map) {
    oop_oop_iterate_oop_map<T>(map, obj, closure);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_maps_reverse(oop obj, OopClosureType* closure) {
  OopMapBlock* const start_map = start_of_nonstatic_oop_maps();
  OopMapBlock* map = start_map + nonstatic_oop_map_count();

  while (start_map < map) {
    --map;
    oop_oop_iterate_oop_map_reverse<T>(map, obj, closure);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_maps_bounded(oop obj, OopClosureType* closure, MemRegion mr) {
  OopMapBlock* map = start_of_nonstatic_oop_maps();
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();

  for (; map < end_map; ++map) {
    oop_oop_iterate_oop_map_bounded<T>(map, obj, closure, mr);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate(oop obj, OopClosureType* closure) {
  if (Devirtualizer::do_metadata(closure)) {
    Devirtualizer::do_klass(closure, this);
  }

  oop_oop_iterate_oop_maps<T>(obj, closure);
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_reverse(oop obj, OopClosureType* closure) {
  assert(!Devirtualizer::do_metadata(closure),
         "Code to handle metadata is not implemented");

  oop_oop_iterate_oop_maps_reverse<T>(obj, closure);
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_bounded(oop obj, OopClosureType* closure, MemRegion mr) {
  if (Devirtualizer::do_metadata(closure)) {
    if (mr.contains(obj)) {
      Devirtualizer::do_klass(closure, this);
    }
  }

  oop_oop_iterate_oop_maps_bounded<T>(obj, closure, mr);
}

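// Allocate an instance of the InstanceKlass mirrored by java_class.
// Throws InstantiationException if java_class does not mirror an
// InstanceKlass (e.g. it mirrors a primitive type), checks that the klass
// may be instantiated, and ensures it is initialized before allocating.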
inline instanceOop InstanceKlass::allocate_instance(oop java_class, TRAPS) {
  Klass* k = java_lang_Class::as_Klass(java_class);
  if (k == NULL) {
    ResourceMark rm(THREAD);
    THROW_(vmSymbols::java_lang_InstantiationException(), NULL);
  }
  InstanceKlass* ik = cast(k);
  ik->check_valid_for_instantiation(false, CHECK_NULL);
  ik->initialize(CHECK_NULL);
  return ik->allocate_instance(THREAD);
}

#endif // SHARE_OOPS_INSTANCEKLASS_INLINE_HPP