/*
 * Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OOPS_INSTANCEKLASS_INLINE_HPP
#define SHARE_OOPS_INSTANCEKLASS_INLINE_HPP

#include "oops/instanceKlass.hpp"

#include "classfile/javaClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "memory/resourceArea.hpp"
#include "oops/klass.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "utilities/debug.hpp"
#include "utilities/devirtualizer.inline.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"

inline intptr_t* InstanceKlass::start_of_itable() const { return (intptr_t*)start_of_vtable() + vtable_length(); }
inline intptr_t* InstanceKlass::end_of_itable()   const { return start_of_itable() + itable_length(); }

inline int InstanceKlass::itable_offset_in_words() const { return start_of_itable() - (intptr_t*)this; }

inline oop InstanceKlass::static_field_base_raw() { return java_mirror(); }

inline OopMapBlock* InstanceKlass::start_of_nonstatic_oop_maps() const {
  return (OopMapBlock*)(start_of_itable() + itable_length());
}

inline Klass** InstanceKlass::end_of_nonstatic_oop_maps() const {
  return (Klass**)(start_of_nonstatic_oop_maps() +
                   nonstatic_oop_map_count());
}

inline InstanceKlass* volatile* InstanceKlass::adr_implementor() const {
  if (is_interface()) {
    return (InstanceKlass* volatile*)end_of_nonstatic_oop_maps();
  } else {
    return NULL;
  }
}

inline ObjArrayKlass* InstanceKlass::array_klasses_acquire() const {
  return Atomic::load_acquire(&_array_klasses);
}

inline void InstanceKlass::release_set_array_klasses(ObjArrayKlass* k) {
  Atomic::release_store(&_array_klasses, k);
}

inline jmethodID* InstanceKlass::methods_jmethod_ids_acquire() const {
  return Atomic::load_acquire(&_methods_jmethod_ids);
}

inline void InstanceKlass::release_set_methods_jmethod_ids(jmethodID* jmeths) {
  Atomic::release_store(&_methods_jmethod_ids, jmeths);
}

// The iteration over the oops in objects is a hot path in the GC code.
// By force inlining the following functions, we get similar GC performance
// as the previous macro based implementation.
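//
// Each OopMapBlock records the offset and count of one contiguous run of
// oop fields in instances of this klass. The helpers below walk those runs
// and hand every field address to Devirtualizer::do_oop; T is instantiated
// as oop or narrowOop, depending on whether compressed oops are in use.
// Three flavors are provided: forward, reverse (descending address order),
// and bounded (clamped to a MemRegion).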

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_map(OopMapBlock* map, oop obj, OopClosureType* closure) {
  T* p         = obj->field_addr<T>(map->offset());
  T* const end = p + map->count();

  for (; p < end; ++p) {
    Devirtualizer::do_oop(closure, p);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_map_reverse(OopMapBlock* map, oop obj, OopClosureType* closure) {
  T* const start = obj->field_addr<T>(map->offset());
  T*       p     = start + map->count();

  while (start < p) {
    --p;
    Devirtualizer::do_oop(closure, p);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_map_bounded(OopMapBlock* map, oop obj, OopClosureType* closure, MemRegion mr) {
  T* p   = obj->field_addr<T>(map->offset());
  T* end = p + map->count();

  T* const l = (T*)mr.start();
  T* const h = (T*)mr.end();
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 &&
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,
         "bounded region must be properly aligned");

  if (p < l) {
    p = l;
  }
  if (end > h) {
    end = h;
  }

  for (; p < end; ++p) {
    Devirtualizer::do_oop(closure, p);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_maps(oop obj, OopClosureType* closure) {
  OopMapBlock* map           = start_of_nonstatic_oop_maps();
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();

  for (; map < end_map; ++map) {
    oop_oop_iterate_oop_map<T>(map, obj, closure);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_maps_reverse(oop obj, OopClosureType* closure) {
  OopMapBlock* const start_map = start_of_nonstatic_oop_maps();
  OopMapBlock* map             = start_map + nonstatic_oop_map_count();

  while (start_map < map) {
    --map;
    oop_oop_iterate_oop_map_reverse<T>(map, obj, closure);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_oop_maps_bounded(oop obj, OopClosureType* closure, MemRegion mr) {
  OopMapBlock* map           = start_of_nonstatic_oop_maps();
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();

  for (; map < end_map; ++map) {
    oop_oop_iterate_oop_map_bounded<T>(map, obj, closure, mr);
  }
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate(oop obj, OopClosureType* closure) {
  if (Devirtualizer::do_metadata(closure)) {
    Devirtualizer::do_klass(closure, this);
  }

  oop_oop_iterate_oop_maps<T>(obj, closure);
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_reverse(oop obj, OopClosureType* closure) {
  assert(!Devirtualizer::do_metadata(closure),
         "Code to handle metadata is not implemented");

  oop_oop_iterate_oop_maps_reverse<T>(obj, closure);
}

template <typename T, class OopClosureType>
ALWAYSINLINE void InstanceKlass::oop_oop_iterate_bounded(oop obj, OopClosureType* closure, MemRegion mr) {
  if (Devirtualizer::do_metadata(closure)) {
    if (mr.contains(obj)) {
      Devirtualizer::do_klass(closure, this);
    }
  }

  oop_oop_iterate_oop_maps_bounded<T>(obj, closure, mr);
}

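// Reflective allocation entry point: resolve the Klass from a
// java.lang.Class mirror, reject mirrors that do not denote an
// instantiable instance class (primitive mirrors have no Klass; abstract
// classes and interfaces fail check_valid_for_instantiation), ensure the
// class is initialized, and then allocate the instance.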
inline instanceOop InstanceKlass::allocate_instance(oop java_class, TRAPS) {
  Klass* k = java_lang_Class::as_Klass(java_class);
  if (k == NULL) {
    ResourceMark rm(THREAD);
    THROW_(vmSymbols::java_lang_InstantiationException(), NULL);
  }
  InstanceKlass* ik = cast(k);
  ik->check_valid_for_instantiation(false, CHECK_NULL);
  ik->initialize(CHECK_NULL);
  return ik->allocate_instance(THREAD);
}

#endif // SHARE_OOPS_INSTANCEKLASS_INLINE_HPP