1 /*
2 * Copyright (c) 2017, 2026, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_VM_OOPS_INLINEKLASS_HPP
26 #define SHARE_VM_OOPS_INLINEKLASS_HPP
27
28 #include "oops/inlineOop.hpp"
29 #include "oops/instanceKlass.hpp"
30 #include "oops/layoutKind.hpp"
31 #include "oops/oopsHierarchy.hpp"
32 #include "oops/valuePayload.hpp"
33 #include "runtime/handles.hpp"
34 #include "utilities/exceptions.hpp"
35 #include "utilities/globalDefinitions.hpp"
36
37 template <typename T>
38 class Array;
39 class ClassFileParser;
40 template <typename T>
41 class GrowableArray;
42 class Method;
43 class RegisterMap;
44 class SigEntry;
45
46 // An InlineKlass is a specialized InstanceKlass for concrete value classes
47 // (abstract value classes are represented by InstanceKlass)
48
class InlineKlass: public InstanceKlass {
  friend class VMStructs;
  friend class InstanceKlass;

 public:
  static const KlassKind Kind = InlineKlassKind;

  // The member fields of the InlineKlass.
  //
  // All Klass objects have vtables starting at offset `sizeof(InstanceKlass)`.
  //
  // This has the effect that sub-klasses of InstanceKlass can't have their own
  // C++ fields, because those would overlap with the vtables (or some of the
  // other dynamically-sized sections).
  //
  // To work around this we stamp out the block members *after* all
  // dynamically-sized sections belonging to the InstanceKlass part of the
  // object.
  //
  // InlineKlass object layout:
  //   +-----------------------+
  //   | sizeof(InstanceKlass) |
  //   +-----------------------+ <= InstanceKlass:header_size()
  //   | vtable                |
  //   +-----------------------+
  //   | other sections        |
  //   +-----------------------+ <= end_of_instance_klass()
  //   | InlineKlass::Members  |
  //   +-----------------------+
  //
  class Members {
    friend class InlineKlass;

    // Addresses used for inline type calling convention
    Array<SigEntry>* _extended_sig;
    Array<VMRegPair>* _return_regs;

    // Entry points of the generated pack/unpack adapters installed via the
    // set_*_handler() accessors below.
    address _pack_handler;
    address _pack_handler_jobject;
    address _unpack_handler;

    int _null_reset_value_offset; // offset of the null reset value; 0 means not yet initialized (asserted in the accessor)
    int _payload_offset; // offset of the beginning of the payload in a heap buffered instance
    int _payload_size_in_bytes; // size of payload layout
    int _payload_alignment; // alignment required for payload
    int _null_free_non_atomic_size_in_bytes; // size of null-free non-atomic flat layout
    int _null_free_non_atomic_alignment; // alignment requirement for null-free non-atomic layout
    int _null_free_atomic_size_in_bytes; // size and alignment requirement for a null-free atomic layout, -1 if no atomic flat layout is possible
    int _nullable_atomic_size_in_bytes; // size and alignment requirement for a nullable layout (always atomic), -1 if no nullable flat layout is possible
    int _nullable_non_atomic_size_in_bytes; // size and alignment requirement for a nullable non-atomic layout, -1 if not available
    int _null_marker_offset; // expressed as an offset from the beginning of the object for a heap buffered value
                             // payload_offset must be subtracted to get the offset from the beginning of the payload

    Members();

    void print_on(outputStream* st) const;
  };

  InlineKlass();

 private:

  // Constructor
  InlineKlass(const ClassFileParser& parser);

  // Calculates where the members are supposed to be placed
  address calculate_members_address() const;

  // Returns the Members block stamped out after the dynamically-sized
  // InstanceKlass sections (see the layout diagram above).
  // _adr_inline_klass_members caches its address and must be set up before
  // any Members accessor is used.
  Members& members() {
    assert(_adr_inline_klass_members != nullptr, "Should have been initialized");
    return *reinterpret_cast<Members*>(_adr_inline_klass_members);
  }

  // Const variant: delegates to the non-const accessor (via const_cast) so
  // the address computation and its assert live in one place.
  inline const Members& members() const {
    InlineKlass* ik = const_cast<InlineKlass*>(this);
    return const_cast<const Members&>(ik->members());
  }

 public:

  bool is_empty_inline_type() const { return _misc_flags.is_empty_inline_type(); }
  void set_is_empty_inline_type() { _misc_flags.set_is_empty_inline_type(true); }

  // Members access functions

  const Array<SigEntry>* extended_sig() const {return members()._extended_sig; }
  void set_extended_sig(Array<SigEntry>* extended_sig) { members()._extended_sig = extended_sig; }

  const Array<VMRegPair>* return_regs() const { return members()._return_regs; }
  void set_return_regs(Array<VMRegPair>* return_regs) { members()._return_regs = return_regs; }

  // pack and unpack handlers for inline types return

  address pack_handler() const { return members()._pack_handler; }
  void set_pack_handler(address pack_handler) { members()._pack_handler = pack_handler; }

  address pack_handler_jobject() const { return members()._pack_handler_jobject; }
  void set_pack_handler_jobject(address pack_handler_jobject) { members()._pack_handler_jobject = pack_handler_jobject; }

  address unpack_handler() const { return members()._unpack_handler; }
  void set_unpack_handler(address unpack_handler) { members()._unpack_handler = unpack_handler; }

  // An offset of 0 is the "not initialized" sentinel, hence the assert.
  int null_reset_value_offset() const {
    int offset = members()._null_reset_value_offset;
    assert(offset != 0, "must not be called if not initialized");
    return offset;
  }
  void set_null_reset_value_offset(int offset) { members()._null_reset_value_offset = offset; }

  // Offset of the payload within a heap buffered instance; 0 is the
  // "not initialized" sentinel, hence the assert.
  int payload_offset() const {
    int offset = members()._payload_offset;
    assert(offset != 0, "Must be initialized before use");
    return offset;
  }
  void set_payload_offset(int offset) { members()._payload_offset = offset; }

  int payload_size_in_bytes() const { return members()._payload_size_in_bytes; }
  void set_payload_size_in_bytes(int payload_size) { members()._payload_size_in_bytes = payload_size; }

  int payload_alignment() const { return members()._payload_alignment; }
  void set_payload_alignment(int alignment) { members()._payload_alignment = alignment; }

  // For each of the four flat layout variants below, a size of -1 means the
  // layout is not available for this klass (see the has_*_layout() queries).

  int null_free_non_atomic_size_in_bytes() const { return members()._null_free_non_atomic_size_in_bytes; }
  void set_null_free_non_atomic_size_in_bytes(int size) { members()._null_free_non_atomic_size_in_bytes = size; }
  bool has_null_free_non_atomic_layout() const { return null_free_non_atomic_size_in_bytes() != -1; }

  int null_free_non_atomic_alignment() const { return members()._null_free_non_atomic_alignment; }
  void set_null_free_non_atomic_alignment(int alignment) { members()._null_free_non_atomic_alignment = alignment; }

  int null_free_atomic_size_in_bytes() const { return members()._null_free_atomic_size_in_bytes; }
  void set_null_free_atomic_size_in_bytes(int size) { members()._null_free_atomic_size_in_bytes = size; }
  bool has_null_free_atomic_layout() const { return null_free_atomic_size_in_bytes() != -1; }

  int nullable_atomic_size_in_bytes() const { return members()._nullable_atomic_size_in_bytes; }
  void set_nullable_atomic_size_in_bytes(int size) { members()._nullable_atomic_size_in_bytes = size; }
  bool has_nullable_atomic_layout() const { return nullable_atomic_size_in_bytes() != -1; }

  int nullable_non_atomic_size_in_bytes() const { return members()._nullable_non_atomic_size_in_bytes; }
  void set_nullable_non_atomic_size_in_bytes(int size) { members()._nullable_non_atomic_size_in_bytes = size; }
  bool has_nullable_non_atomic_layout() const { return nullable_non_atomic_size_in_bytes() != -1; }

  int null_marker_offset() const { return members()._null_marker_offset; }
  void set_null_marker_offset(int offset) { members()._null_marker_offset = offset; }
  int null_marker_offset_in_payload() const { return null_marker_offset() - payload_offset(); }

  bool supports_nullable_layouts() const {
    return has_nullable_non_atomic_layout() || has_nullable_atomic_layout();
  }

  // Null marker support: nullable flat layouts carry a byte inside the
  // payload that records whether the value is null (0) or non-null (1).
  // All four helpers require a klass that supports a nullable layout.

  jbyte* null_marker_address(address payload) {
    assert(supports_nullable_layouts(), " Must do");
    return (jbyte*)payload + null_marker_offset_in_payload();
  }

  bool is_payload_marked_as_null(address payload) {
    assert(supports_nullable_layouts(), " Must do");
    return *null_marker_address(payload) == 0;
  }

  void mark_payload_as_non_null(address payload) {
    assert(supports_nullable_layouts(), " Must do");
    *null_marker_address(payload) = 1;
  }

  void mark_payload_as_null(address payload) {
    assert(supports_nullable_layouts(), " Must do");
    *null_marker_address(payload) = 0;
  }

  // Per-LayoutKind queries; definitions live in the .inline.hpp counterpart.
  inline bool layout_has_null_marker(LayoutKind lk) const;

  inline bool is_layout_supported(LayoutKind lk) const;

  inline int layout_alignment(LayoutKind kind) const;
  inline int layout_size_in_bytes(LayoutKind kind) const;

#if INCLUDE_CDS
  void remove_unshareable_info() override;
#endif

 private:
  // Collects the signature entries describing this klass' fields into 'sig',
  // starting at base_off (presumably used to build _extended_sig for the
  // calling convention — confirm against initialize_calling_convention()).
  int collect_fields(GrowableArray<SigEntry>* sig, int base_off = 0, int null_marker_offset = -1);

  // NOTE(review): presumably releases the generated pack/unpack handler
  // blobs — confirm in the .cpp.
  void cleanup_blobs();

 public:
  // Type testing
  bool is_inline_klass_slow() const override { return true; }

  // Casting from Klass*

  static InlineKlass* cast(Klass* k) {
    return const_cast<InlineKlass*>(cast(const_cast<const Klass*>(k)));
  }

  // Asserted downcast: k must be non-null and an inline klass.
  static const InlineKlass* cast(const Klass* k) {
    assert(k != nullptr, "k should not be null");
    assert(k->is_inline_klass(), "cast to InlineKlass");
    return static_cast<const InlineKlass*>(k);
  }

  // Allocates a stand alone value in the Java heap
  // initialized to default value (cleared memory)
  inlineOop allocate_instance(TRAPS);

  // Address of the payload inside the heap buffered instance o.
  address payload_addr(oop o) const;

  bool maybe_flat_in_array();
  bool is_always_flat_in_array();

  bool contains_oops() const { return nonstatic_oop_map_count() > 0; }
  int nonstatic_oop_count();

  // oop iterate raw inline type data pointer (where oop_addr may not be an oop, but backing/array-element)
  template <typename T, class OopClosureType>
  inline void oop_iterate_specialized(const address oop_addr, OopClosureType* closure);

  template <typename T, class OopClosureType>
  inline void oop_iterate_specialized_bounded(const address oop_addr, OopClosureType* closure, uintptr_t lo, uintptr_t hi);

  // calling convention support
  void initialize_calling_convention(TRAPS);

  bool can_be_passed_as_fields() const;
  bool can_be_returned_as_fields(bool init = false) const;
  void save_oop_fields(const RegisterMap& map, GrowableArray<Handle>& handles) const;
  void restore_oop_results(RegisterMap& map, GrowableArray<Handle>& handles) const;
  oop realloc_result(const RegisterMap& reg_map, const GrowableArray<Handle>& handles, TRAPS);
  static InlineKlass* returned_inline_klass(const RegisterMap& reg_map, bool* return_oop = nullptr, Method* method = nullptr);

  // Offset of the cached Members block address within the InstanceKlass part.
  static ByteSize adr_members_offset() {
    return InstanceKlass::adr_inline_klass_members_offset();
  }

  // pack and unpack handlers. Need to be loadable from generated code
  // so at a fixed offset from the base of the klass pointer.
  // NOTE: the ByteSize offsets below are relative to the start of the
  // Members block, not to the klass itself.
  static ByteSize pack_handler_offset() {
    return byte_offset_of(Members, _pack_handler);
  }

  static ByteSize pack_handler_jobject_offset() {
    return byte_offset_of(Members, _pack_handler_jobject);
  }

  static ByteSize unpack_handler_offset() {
    return byte_offset_of(Members, _unpack_handler);
  }

  static ByteSize null_reset_value_offset_offset() {
    return byte_offset_of(Members, _null_reset_value_offset);
  }

  static ByteSize payload_offset_offset() {
    return byte_offset_of(Members, _payload_offset);
  }

  static ByteSize null_marker_offset_offset() {
    return byte_offset_of(Members, _null_marker_offset);
  }

  // Accessors for the null reset value (located at null_reset_value_offset())
  oop null_reset_value() const;
  void set_null_reset_value(oop val);

  void deallocate_contents(ClassLoaderData* loader_data);
  static void cleanup(InlineKlass* ik) ;

  void print_on(outputStream* st) const override;

  // Verification
  void verify_on(outputStream* st) override;
  void oop_verify_on(oop obj, outputStream* st) override;
};
321
322 #endif // SHARE_VM_OOPS_INLINEKLASS_HPP