1 /*
2 * Copyright (c) 2020, 2026, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classFileParser.hpp"
26 #include "classfile/fieldLayoutBuilder.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "jvm.h"
30 #include "memory/resourceArea.hpp"
31 #include "oops/array.hpp"
32 #include "oops/fieldStreams.inline.hpp"
33 #include "oops/inlineKlass.inline.hpp"
34 #include "oops/instanceKlass.inline.hpp"
35 #include "oops/instanceMirrorKlass.hpp"
36 #include "oops/klass.inline.hpp"
37 #include "runtime/fieldDescriptor.inline.hpp"
38 #include "utilities/powerOfTwo.hpp"
39
// Selects the kind of layout (flat or reference) to be used for the given field.
// Returns LayoutKind::REFERENCE whenever flattening is disabled, impossible
// (the field's type is not a known value class), or unsafe for this container;
// otherwise picks a flat layout that the field's value class supports.
static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
                                         bool can_use_atomic_flat) {

  // The can_use_atomic_flat argument indicates if an atomic flat layout can be used for this field.
  // This argument will be false if the container is a loosely consistent value class. Using an atomic layout
  // in a container that has no atomicity guarantee creates a risk to see this field's value be subject to
  // tearing even if the field's class was declared atomic (non loosely consistent).

  if (!UseFieldFlattening) {
    // Flattening is globally disabled
    return LayoutKind::REFERENCE;
  }

  if (field_info.field_flags().is_injected()) {
    // don't flatten injected fields
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_volatile()) {
    // volatile is used as a keyword to prevent flattening
    return LayoutKind::REFERENCE;
  }

  if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
    // field's type is not a known value class, using a reference
    return LayoutKind::REFERENCE;
  }

  InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
  InlineKlass* vk = inline_field_info->klass();

  if (field_info.field_flags().is_null_free_inline_type()) {
    assert(field_info.access_flags().is_strict(), "null-free fields must be strict");
    if (vk->must_be_atomic() || AlwaysAtomicAccesses) {
      // Presumably a naturally atomic payload cannot tear, so the (smaller)
      // non-atomic layout can be preferred when it exists — TODO confirm.
      if (vk->is_naturally_atomic() && vk->has_null_free_non_atomic_layout()) return LayoutKind::NULL_FREE_NON_ATOMIC_FLAT;
      return (vk->has_null_free_atomic_layout() && can_use_atomic_flat) ? LayoutKind::NULL_FREE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      // No atomicity requirement: take the non-atomic flat layout if present
      return vk->has_null_free_non_atomic_layout() ? LayoutKind::NULL_FREE_NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
    }
  } else {
    // To preserve the consistency between the null-marker and the field content, the NULLABLE_NON_ATOMIC_FLAT
    // can only be used in containers that have atomicity guarantees (can_use_atomic_flat argument set to true)
    if (field_info.access_flags().is_strict() && field_info.access_flags().is_final() && can_use_atomic_flat) {
      if (vk->has_nullable_non_atomic_layout()) return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    // Another special case where NULLABLE_NON_ATOMIC_FLAT can be used: nullable empty values, because the
    // payload of those values contains only the null-marker
    if (vk->is_empty_inline_type() && vk->has_nullable_non_atomic_layout()) {
      return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    if (UseNullableValueFlattening && vk->has_nullable_atomic_layout()) {
      return can_use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return LayoutKind::REFERENCE;
    }
  }
}
96
97 static bool field_is_inlineable(FieldInfo fieldinfo, LayoutKind lk, Array<InlineLayoutInfo>* ili) {
98 if (fieldinfo.field_flags().is_null_free_inline_type()) {
99 // A null-free inline type is always inlineable
100 return true;
101 }
102
103 if (lk != LayoutKind::REFERENCE) {
104 // We've chosen a layout that isn't a normal reference
105 return true;
106 }
107
108 const int field_index = (int)fieldinfo.index();
109 if (!fieldinfo.field_flags().is_injected() &&
110 ili != nullptr &&
111 ili->adr_at(field_index)->klass() != nullptr &&
112 !ili->adr_at(field_index)->klass()->is_identity_class() &&
113 !ili->adr_at(field_index)->klass()->is_abstract()) {
114 // The field's klass is not an identity class or abstract
115 return true;
116 }
117
118 return false;
119 }
120
121 static void get_size_and_alignment(InlineKlass* vk, LayoutKind kind, int* size, int* alignment) {
122 switch(kind) {
123 case LayoutKind::NULL_FREE_NON_ATOMIC_FLAT:
124 *size = vk->null_free_non_atomic_size_in_bytes();
125 *alignment = vk->null_free_non_atomic_alignment();
126 break;
127 case LayoutKind::NULL_FREE_ATOMIC_FLAT:
128 *size = vk->null_free_atomic_size_in_bytes();
129 *alignment = *size;
130 break;
131 case LayoutKind::NULLABLE_ATOMIC_FLAT:
132 *size = vk->nullable_atomic_size_in_bytes();
133 *alignment = *size;
134 break;
135 case LayoutKind::NULLABLE_NON_ATOMIC_FLAT:
136 *size = vk->nullable_non_atomic_size_in_bytes();
137 *alignment = vk->null_free_non_atomic_alignment();
138 break;
139 default:
140 ShouldNotReachHere();
141 }
142 }
143
// Constructor for blocks that do not represent a declared field (no field
// index): EMPTY/RESERVED/PADDING/INHERITED filler and NULL_MARKER blocks.
// Offset is left unset (-1) and alignment defaults to 1 (no constraint).
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN),
  _offset(-1),
  _alignment(1),
  _size(size),
  _field_index(-1) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
158
159
// Constructor for blocks backing a declared field (REGULAR, FLAT, or
// INHERITED), identified by its index in the FieldInfo array. The offset is
// left unset (-1) until the block is placed into a layout.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN),
  _offset(-1),
  _alignment(alignment),
  _size(size),
  _field_index(index) {
  assert(kind == REGULAR || kind == FLAT || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
175
176 bool LayoutRawBlock::fit(int size, int alignment) {
177 int adjustment = 0;
178 if ((_offset % alignment) != 0) {
179 adjustment = alignment - (_offset % alignment);
180 }
181 return _size >= size + adjustment;
182 }
183
// A FieldGroup gathers the fields that must be laid out together: the static
// group, the root (default) group, or one @Contended group. The field lists
// are allocated lazily on first insertion.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _small_primitive_fields(nullptr),
  _big_primitive_fields(nullptr),
  _oop_fields(nullptr),
  _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
191
192 void FieldGroup::add_primitive_field(int idx, BasicType type) {
193 int size = type2aelembytes(type);
194 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
195 if (size >= oopSize) {
196 add_to_big_primitive_list(block);
197 } else {
198 add_to_small_primitive_list(block);
199 }
200 }
201
202 void FieldGroup::add_oop_field(int idx) {
203 int size = type2aelembytes(T_OBJECT);
204 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
205 if (_oop_fields == nullptr) {
206 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
207 }
208 _oop_fields->append(block);
209 _oop_count++;
210 }
211
212 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk, int size, int alignment) {
213 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
214 block->set_inline_klass(vk);
215 block->set_layout_kind(lk);
216 if (block->size() >= oopSize) {
217 add_to_big_primitive_list(block);
218 } else {
219 add_to_small_primitive_list(block);
220 }
221 }
222
223 void FieldGroup::sort_by_size() {
224 if (_small_primitive_fields != nullptr) {
225 _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
226 }
227 if (_big_primitive_fields != nullptr) {
228 _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
229 }
230 }
231
232 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
233 if (_small_primitive_fields == nullptr) {
234 _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
235 }
236 _small_primitive_fields->append(block);
237 }
238
239 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
240 if (_big_primitive_fields == nullptr) {
241 _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
242 }
243 _big_primitive_fields->append(block);
244 }
245
// Builds an empty FieldLayout; the block list is created later by one of the
// initialize_*_layout() methods. All offsets start at -1, meaning "not set".
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
  _field_info(field_info),
  _inline_layout_info_array(inline_layout_info_array),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),   // == nullptr until a layout is initialized
  _last(_blocks),    // idem
  _super_first_field_offset(-1),
  _super_alignment(-1),
  _super_min_align_required(-1),
  _null_reset_value_offset(-1),
  _acmp_maps_offset(-1),
  _super_has_nonstatic_fields(false),
  _has_inherited_fields(false) {}
260
// Sets up the block list for a static-field layout: a single unbounded EMPTY
// block, preceded (when known) by a RESERVED block covering the java.lang.Class
// instance's own fields in the mirror.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
275
// Sets up the block list for an instance layout. Without a super class, the
// layout is a RESERVED header block followed by an unbounded EMPTY block.
// With a super class, the super's field layout is reconstructed first and this
// class's fields are then allocated either from the first empty slot or only
// after the inherited fields (when the super is @Contended).
// 'super_ends_with_oop' is an out-parameter set by reconstruct_layout().
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
  if (super_klass == nullptr) {
    super_ends_with_oop = false;
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    reconstruct_layout(super_klass, _super_has_nonstatic_fields, super_ends_with_oop);
    fill_holes(super_klass);
    if ((!super_klass->has_contended_annotations()) || !_super_has_nonstatic_fields) {
      _start = _blocks; // start allocating fields from the first empty block
    } else {
      _start = _last; // append fields at the end of the reconstructed layout
    }
  }
}
294
295 LayoutRawBlock* FieldLayout::first_field_block() {
296 LayoutRawBlock* block = _blocks;
297 while (block != nullptr
298 && block->block_kind() != LayoutRawBlock::INHERITED
299 && block->block_kind() != LayoutRawBlock::REGULAR
300 && block->block_kind() != LayoutRawBlock::FLAT
301 && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
302 block = block->next_block();
303 }
304 return block;
305 }
306
// Insert a set of fields into a layout.
// For each field, search for an empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Memoization of the previous field's requirements and search outcome,
  // used to skip provably futile searches (lists are sorted by size).
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;

      // Scan backwards from the end, keeping the smallest fitting empty slot
      // (best-fit strategy to limit fragmentation).
      while (cursor != start) {
        if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No fitting slot: append at the end and remember the failure
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
359
// Used for classes with hard coded field offsets, insert a field at the specified offset.
// Scans the layout from 'start' for the empty slot covering 'offset', splits off
// any leading padding, and places the block there; fatal if no slot matches.
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    // A slot matches if it covers the requested offset, or if we reached the
    // trailing (unbounded) block of the layout.
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Insert filler for the gap between the slot start and the hard-coded offset
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        // The block consumed the whole slot, drop the now-empty remainder
        remove(slot);
      }
      if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
        _field_info->adr_at(block->field_index())->set_offset(block->offset());
      }
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
391
// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = nullptr;
  if (start == last_block()) {
    candidate = last_block();
  } else {
    // Scan backwards from the end for an empty slot that can hold the whole
    // set, aligned on the first block's constraint; fall back to appending.
    LayoutRawBlock* first = list->at(0);
    candidate = last_block()->prev_block();
    while (candidate->block_kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != nullptr, "Candidate must not be null");
    assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  // Insert the blocks one after the other in the chosen slot
  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
431
432 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
433 assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
434 if (slot->offset() % block->alignment() != 0) {
435 int adjustment = block->alignment() - (slot->offset() % block->alignment());
436 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
437 insert(slot, adj);
438 }
439 assert(block->size() >= block->size(), "Enough space must remain after adjustment");
440 insert(slot, block);
441 if (slot->size() == 0) {
442 remove(slot);
443 }
444 // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
445 if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
446 _field_info->adr_at(block->field_index())->set_offset(block->offset());
447 if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
448 _null_reset_value_offset = block->offset();
449 }
450 if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::acmp_maps_name()) {
451 _acmp_maps_offset = block->offset();
452 }
453 }
454 if (LayoutKindHelper::is_nullable_flat(block->layout_kind())) {
455 int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->payload_offset() + block->offset();
456 _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
457 _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
458 }
459
460 return block;
461 }
462
// Rebuilds the instance layout of 'ik' (including all its super classes) as a
// list of INHERITED blocks sorted by offset, preceded by a RESERVED header
// block. Also records super-class layout constraints (_super_alignment,
// _super_min_align_required, _super_first_field_offset) and reports, through
// the out-parameters, whether any non-static field was found and whether the
// last (highest-offset) inherited field is an oop.
void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_nonstatic_fields, bool& ends_with_oop) {
  has_nonstatic_fields = ends_with_oop = false;
  if (ik->is_abstract() && !ik->is_identity_class()) {
    // Abstract value classes force a conservative (long) alignment on subclasses
    _super_alignment = type2aelembytes(BasicType::T_LONG);
  }
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  // Track the highest-offset field to decide whether the layout ends with an oop
  BasicType last_type;
  int last_offset = -1;
  while (ik != nullptr) {
    for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // distinction between static and non-static fields is missing
      if (fs.access_flags().is_static()) continue;
      has_nonstatic_fields = true;
      _has_inherited_fields = true;
      if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) {
        _super_first_field_offset = fs.offset();
      }
      LayoutRawBlock* block;
      if (fs.is_flat()) {
        // Flat fields reproduce the size/alignment of the layout they were given
        InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
        InlineKlass* vk = layout_info.klass();
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
                                   vk->layout_size_in_bytes(layout_info.kind()),
                                   vk->layout_alignment(layout_info.kind()));
        assert(_super_alignment == -1 || _super_alignment >= vk->payload_alignment(), "Invalid value alignment");
        _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
      } else {
        int size = type2aelembytes(type);
        // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
        // For primitive types, the alignment is equal to the size
        assert(_super_alignment == -1 || _super_alignment >= size, "Invalid value alignment");
        _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
      }
      if (fs.offset() > last_offset) {
        last_offset = fs.offset();
        last_type = type;
      }
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    // Walk up the super-class chain until the root is reached
    ik = ik->super() == nullptr ? nullptr : ik->super();
  }
  // last_type is only read when last_offset > 0, i.e. after it has been set
  assert(last_offset == -1 || last_offset > 0, "Sanity");
  if (last_offset > 0 &&
      (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
    ends_with_oop = true;
  }

  // Chain the header block and the sorted inherited blocks into the layout list
  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;
  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
}
525
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    // A gap between consecutive blocks becomes a filler block
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      int size = b->next_block()->offset() - (b->offset() + b->size());
      // FIXME it would be better if initial empty block were tagged as PADDING for value classes
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Terminate the layout with an unbounded EMPTY block for future insertions
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
570
// Inserts 'block' at the beginning of the empty 'slot': the slot keeps its
// identity but is shrunk and shifted past the inserted block. Head (_blocks)
// and _start pointers are updated when the slot was at those positions.
// Returns the inserted block.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Splice 'block' into the doubly-linked list, just before 'slot'
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  if (_blocks == slot) {
    _blocks = block;
  }
  if (_start == slot) {
    _start = block;
  }
  return block;
}
593
// Unlinks 'block' from the layout's block list. The block is never freed here
// (arena allocation); callers may still read its fields after removal.
// The trailing block (_last) must never be removed.
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    // Removing the head: its successor becomes the new head
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    // Keep _start valid: fall back to the removed block's predecessor
    _start = block->prev_block();
  }
}
611
// Shifts all field blocks (from the first field block to the end) by 'shift'
// bytes toward higher offsets, growing the preceding EMPTY block or inserting
// a PADDING block to fill the gap. FieldInfo offsets and null-marker offsets
// are updated accordingly.
void FieldLayout::shift_fields(int shift) {
  LayoutRawBlock* b = first_field_block();
  LayoutRawBlock* previous = b->prev_block();
  if (previous->block_kind() == LayoutRawBlock::EMPTY) {
    // Absorb the gap into the existing empty block
    previous->set_size(previous->size() + shift);
  } else {
    // No empty block before the fields: insert explicit padding
    LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
    nb->set_offset(b->offset());
    previous->set_next_block(nb);
    nb->set_prev_block(previous);
    b->set_prev_block(nb);
    nb->set_next_block(b);
  }
  while (b != nullptr) {
    b->set_offset(b->offset() + shift);
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      _field_info->adr_at(b->field_index())->set_offset(b->offset());
      if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
        // Null markers move together with their flat field
        int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
        _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
        _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
      }
    }
    assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
    b = b->next_block();
  }
}
639
640 LayoutRawBlock* FieldLayout::find_null_marker() {
641 LayoutRawBlock* b = _blocks;
642 while (b != nullptr) {
643 if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
644 return b;
645 }
646 b = b->next_block();
647 }
648 ShouldNotReachHere();
649 }
650
// Removes the layout's NULL_MARKER block, merging its space into the following
// EMPTY block when possible, otherwise simply retagging it as EMPTY.
// It is a bug to call this on a layout without a null marker.
void FieldLayout::remove_null_marker() {
  LayoutRawBlock* b = first_field_block();
  while (b != nullptr) {
    if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
      if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
        // Merge: the empty successor absorbs the marker's offset and size.
        // Note: remove() only unlinks, so reading b afterwards is safe.
        LayoutRawBlock* n = b->next_block();
        remove(b);
        n->set_offset(b->offset());
        n->set_size(n->size() + b->size());
      } else {
        // No empty neighbor: keep the block in place but mark it reusable
        b->set_block_kind(LayoutRawBlock::EMPTY);
      }
      return;
    }
    b = b->next_block();
  }
  ShouldNotReachHere(); // if we reach this point, the null marker was not found!
}
669
// Debug/logging helper: prints one line per raw block of the layout up to
// (excluding) the trailing block. 'inline_fields' resolves the klass of FLAT
// fields; 'super' is walked to recover name/signature of INHERITED fields;
// 'dummy_field_is_reused_as_null_marker' annotates the reused dummy field.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields, bool dummy_field_is_reused_as_null_marker) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->block_kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print(" @%d %s %d/%d \"%s\" %s",
                      b->offset(),
                      "REGULAR",
                      b->size(),
                      b->alignment(),
                      fi->name(_cp)->as_C_string(),
                      fi->signature(_cp)->as_C_string());

        if (dummy_field_is_reused_as_null_marker) {
          // The dummy (empty_marker) field doubles as the null marker
          const bool is_dummy_field = fi->name(_cp)->fast_compare(vmSymbols::symbol_at(VM_SYMBOL_ENUM_NAME(empty_marker_name))) == 0;
          if (is_dummy_field) {
            output->print(" (reused as null-marker)");
          }
        }

        output->cr();
        break;
      }
      case LayoutRawBlock::FLAT: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
        assert(ik != nullptr, "");
        output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
                         b->offset(),
                         "FLAT",
                         b->size(),
                         b->alignment(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         ik->name()->as_C_string(),
                         ik->class_loader_data(),
                         LayoutKindHelper::layout_kind_as_string(b->layout_kind()));
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %s %d/-",
                         b->offset(),
                         "RESERVED",
                         b->size());
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        // Walk the super-class chain to find the field declared at this offset
        bool found = false;
        const InstanceKlass* ik = super;
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
              output->print_cr(" @%d %s %d/%d \"%s\" %s",
                               b->offset(),
                               "INHERITED",
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla => FIXME
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string());
              found = true;
              break;
            }
          }
          ik = ik->super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "EMPTY",
                         b->size());
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "PADDING",
                         b->size());
        break;
      case LayoutRawBlock::NULL_MARKER:
      {
        output->print_cr(" @%d %s %d/1 ",
                         b->offset(),
                         "NULL_MARKER",
                         b->size());
        break;
      }
      default:
        fatal("Unknown block type");
    }
    b = b->next_block();
  }
}
767
// Gathers all the inputs needed to compute the instance and static layouts of
// a class being parsed. All computed offsets/sizes start at -1 ("not yet
// computed"); the actual work happens in the build/compute methods.
// NOTE(review): _super_ends_with_oop is not initialized here — it is assigned
// in prologue(); confirm it is never read before that.
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type,bool is_abstract_value,
                                       bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
  _classname(classname),
  _loader_data(loader_data),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _inline_layout_info_array(inline_layout_info_array),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _payload_alignment(-1),
  _payload_offset(-1),
  _null_marker_offset(-1),
  _payload_size_in_bytes(-1),
  _null_free_non_atomic_layout_size_in_bytes(-1),
  _null_free_non_atomic_layout_alignment(-1),
  _null_free_atomic_layout_size_in_bytes(-1),
  _nullable_atomic_layout_size_in_bytes(-1),
  _nullable_non_atomic_layout_size_in_bytes(-1),
  _fields_size_sum(0),
  _declared_nonstatic_fields_count(0),
  _has_non_naturally_atomic_fields(false),
  _is_naturally_atomic(false),
  _must_be_atomic(must_be_atomic),
  _has_nonstatic_fields(false),
  _has_inlineable_fields(false),
  _has_inlined_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _is_abstract_value(is_abstract_value),
  _is_empty_inline_class(false) {}
805
806 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
807 assert(g > 0, "must only be called for named contended groups");
808 FieldGroup* fg = nullptr;
809 for (int i = 0; i < _contended_groups.length(); i++) {
810 fg = _contended_groups.at(i);
811 if (fg->contended_group() == g) return fg;
812 }
813 fg = new FieldGroup(g);
814 _contended_groups.append(fg);
815 return fg;
816 }
817
818 void FieldLayoutBuilder::prologue() {
819 _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
820 const InstanceKlass* super_klass = _super_klass;
821 _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
822 _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
823 if (super_klass != nullptr) {
824 _has_nonstatic_fields = super_klass->has_nonstatic_fields();
825 }
826 _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
827 _static_layout->initialize_static_layout();
828 _static_fields = new FieldGroup();
829 _root_group = new FieldGroup();
830 }
831
// Field sorting for regular (non-inline) classes:
// - fields are sorted into static and non-static fields
// - non-static fields are also sorted according to their contention group
//   (support of the @Contended annotation)
// - @Contended annotation is ignored for static fields
// - field flattening decisions are taken in this method
void FieldLayoutBuilder::regular_field_sorting() {
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    // Select the group the field belongs to: static fields go to _static_fields,
    // contended fields to a (named or anonymous) contended group, everything
    // else to the root group.
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      if (fieldinfo.field_flags().is_contended()) {
        int g = fieldinfo.contended_group();
        if (g == 0) {
          // Anonymous contended group: each such field gets its own group.
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        group->add_primitive_field(idx, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        // Regular classes are not loosely consistent, so an atomic flat layout
        // is always allowed for their fields (third argument is true).
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);

        if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
          _has_inlineable_fields = true;
        }

        if (lk == LayoutKind::REFERENCE) {
          // Field stays a reference: it contributes one oop map entry unless static.
          if (group != _static_fields) _nonstatic_oopmap_count++;
          group->add_oop_field(idx);
        } else {
          // Field will be flattened: record the flat layout characteristics.
          assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
                 "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

          const int field_index = (int)fieldinfo.index();
          assert(_inline_layout_info_array != nullptr, "Array must have been created");
          assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
          _has_inlined_fields = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
          int size, alignment;
          get_size_and_alignment(vk, lk, &size, &alignment);
          group->add_flat_field(idx, vk, lk, size, alignment);
          _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
          // All oop map entries of the flat field's class become entries of this class.
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
          // no need to update _must_be_atomic if vk->must_be_atomic() is true because current class is not an inline class
        }
        break;
      }
      default:
        fatal("Something wrong?");
    }
  }
  // Sort each group by decreasing field size to minimize padding holes.
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}
916
/* Field sorting for inline classes:
 * - because inline classes are immutable, the @Contended annotation is ignored
 *   when computing their layout (with only read operations, there's no false
 *   sharing issue)
 * - this method also records the alignment of the field with the most
 *   constraining alignment; this value is then used as the alignment
 *   constraint when flattening this inline type into another container
 * - field flattening decisions are taken in this method (those decisions are
 *   currently only based on the size of the fields to be flattened, the size
 *   of the resulting instance is not considered)
 */
void FieldLayoutBuilder::inline_class_field_sorting() {
  assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
  // 'alignment' accumulates the most constraining alignment over all declared
  // non-static fields; stays -1 if the class declares none.
  int alignment = -1;
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    int field_alignment = 1;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      _declared_nonstatic_fields_count++;
      // No contended groups for inline classes (see header comment).
      group = _root_group;
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        if (group != _static_fields) {
          field_alignment = type2aelembytes(type); // alignment == size for primitive types
        }
        group->add_primitive_field(idx, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);

        if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
          _has_inlineable_fields = true;
        }

        if (lk == LayoutKind::REFERENCE) {
          if (group != _static_fields) {
            _nonstatic_oopmap_count++;
            field_alignment = type2aelembytes(type); // alignment == size for oops
          }
          group->add_oop_field(idx);
        } else {
          // Field will be flattened: record the flat layout characteristics.
          assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
                 "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

          const int field_index = (int)fieldinfo.index();
          assert(_inline_layout_info_array != nullptr, "Array must have been created");
          assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
          _has_inlined_fields = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
          // A single non-naturally-atomic flat field prevents this class from
          // being naturally atomic (checked later in compute_inline_class_layout()).
          if (!vk->is_naturally_atomic()) _has_non_naturally_atomic_fields = true;
          // Note: this 'alignment' intentionally shadows the outer accumulator;
          // it receives the flat field's own alignment from get_size_and_alignment().
          int size, alignment;
          get_size_and_alignment(vk, lk, &size, &alignment);
          group->add_flat_field(idx, vk, lk, size, alignment);
          _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          field_alignment = alignment;
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
        }
        break;
      }
      default:
        fatal("Unexpected BasicType");
    }
    // Track the most constraining alignment among declared non-static fields.
    if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
  }
  _payload_alignment = alignment;
  assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
}
1004
1005 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
1006 if (ContendedPaddingWidth > 0) {
1007 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
1008 _layout->insert(slot, padding);
1009 }
1010 }
1011
// Computation of regular classes layout is an evolution of the previous default layout
// (FieldAllocationStyle 1):
//   - primitive fields (both primitive types and flat inline types) are allocated
//     first (from the biggest to the smallest)
//   - oop fields are allocated, either in existing gaps or at the end of
//     the layout. We allocate oops in a single block to have a single oop map entry.
//   - if the super class ended with an oop, we lead with oops. That will cause the
//     trailing oop map entry of the super class and the oop map entry of this class
//     to be folded into a single entry later. Correspondingly, if the super class
//     ends with a primitive field, we gain nothing by leading with oops; therefore
//     we let oop fields trail, thus giving future derived classes the chance to apply
//     the same trick.
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();
  // A class-level @Contended annotation pads the whole instance, before and
  // after the declared fields.
  if (_is_contended) {
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }

  // Ordering of oop vs primitive fields depends on how the super class ends,
  // to maximize oop map entry folding (see header comment).
  if (_super_ends_with_oop) {
    _layout->add(_root_group->oop_fields());
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
  } else {
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
    _layout->add(_root_group->oop_fields());
  }

  // Each contended group is isolated with padding before it (and after the
  // last one, via need_tail_padding).
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      insert_contended_padding(start);
      _layout->add(cg->big_primitive_fields());
      _layout->add(cg->small_primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  epilogue();
}
1069
/* Computation of inline classes has a slightly different strategy than for
 * regular classes. Regular classes have their oop fields allocated at the end
 * of the layout to increase GC performance. Unfortunately, this strategy
 * increases the number of empty slots inside an instance. Because the purpose
 * of inline classes is to be embedded into other containers, it is critical
 * to keep their size as small as possible. For this reason, the allocation
 * strategy is:
 *   - big primitive fields (primitive types and flat inline types smaller
 *     than an oop) are allocated first (from the biggest to the smallest)
 *   - then oop fields
 *   - then small primitive fields (from the biggest to the smallest)
 *
 * Overall steps of this method:
 *   1. inject a dummy field into empty concrete value classes
 *   2. sort fields and compute the raw (payload) layout with proper alignment
 *   3. derive the additional layouts: null-free non-atomic, null-free atomic,
 *      and the nullable layouts (which require a null marker)
 *   4. adjust the raw layout's alignment/size to stay compatible with the
 *      atomic layouts
 *   5. compute the static layout and the acmp maps
 */
void FieldLayoutBuilder::compute_inline_class_layout() {

  // Test if the concrete inline class is an empty class (no instance fields)
  // and insert a dummy field if needed
  if (!_is_abstract_value) {
    bool declares_nonstatic_fields = false;
    for (FieldInfo fieldinfo : *_field_info) {
      if (!fieldinfo.access_flags().is_static()) {
        declares_nonstatic_fields = true;
        break;
      }
    }

    if (!declares_nonstatic_fields) {
      bool has_inherited_fields = _super_klass != nullptr && _super_klass->has_nonstatic_fields();
      if (!has_inherited_fields) {
        // Inject ".empty" dummy field (a single injected byte) so the payload
        // is never zero-sized; it can double as the null marker later.
        _is_empty_inline_class = true;
        FieldInfo::FieldFlags fflags(0);
        fflags.update_injected(true);
        AccessFlags aflags;
        FieldInfo fi(aflags,
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
                     0,
                     fflags);
        int idx = _field_info->append(fi);
        _field_info->adr_at(idx)->set_index(idx);
      }
    }
  }

  prologue();
  inline_class_field_sorting();

  assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");

  if (!_layout->super_has_nonstatic_fields()) {
    // No inherited fields, the layout must be empty except for the RESERVED block.
    // PADDING is inserted if needed to ensure the correct alignment of the payload.
    if (_is_abstract_value && _has_nonstatic_fields) {
      // non-static fields of the abstract class must be laid out without knowing
      // the alignment constraints of the fields of the sub-classes, so the worst
      // case scenario is assumed, which is currently the alignment of T_LONG.
      // PADDING is added if needed to ensure the payload will respect this alignment.
      _payload_alignment = type2aelembytes(BasicType::T_LONG);
    }
    assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY, "Unexpected");
    LayoutRawBlock* first_empty = _layout->start()->next_block();
    if (first_empty->offset() % _payload_alignment != 0) {
      // Pad up to the next _payload_alignment boundary.
      LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
      _layout->insert(first_empty, padding);
      if (first_empty->size() == 0) {
        _layout->remove(first_empty);
      }
      _layout->set_start(padding);
    }
  } else { // the class has inherited some fields from its super(s)
    if (!_is_abstract_value) {
      // This is the step where the layout of the final concrete value class
      // is computed. Super abstract value classes might have been too conservative
      // regarding alignment constraints, but now that the full set of non-static fields is
      // known, compute which alignment to use, then set first allowed field offset

      assert(_has_nonstatic_fields, "Concrete value classes must have at least one field");
      if (_payload_alignment == -1) { // current class declares no local nonstatic fields
        _payload_alignment = _layout->super_min_align_required();
      }

      assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
      assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");

      if (_payload_alignment < _layout->super_alignment()) {
        // Tighten to the strongest of the local alignment and the super's
        // minimal required alignment.
        int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
        assert(new_alignment % _payload_alignment == 0, "Must be");
        assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
        _payload_alignment = new_alignment;
      }
      _layout->set_start(_layout->first_field_block());
    }
  }

  // Allocation order specific to inline classes (see header comment):
  // big primitives first, then oops, then small primitives.
  _layout->add(_root_group->big_primitive_fields());
  _layout->add(_root_group->oop_fields());
  _layout->add(_root_group->small_primitive_fields());

  LayoutRawBlock* first_field = _layout->first_field_block();
  if (first_field != nullptr) {
    _payload_offset = _layout->first_field_block()->offset();
    _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
  } else {
    // A concrete value class always has at least the injected dummy field,
    // so only abstract value classes can reach this branch.
    assert(_is_abstract_value, "Concrete inline types must have at least one field");
    _payload_offset = _layout->blocks()->size();
    _payload_size_in_bytes = 0;
  }

  // Determining if the value class is naturally atomic:
  // either it has no inherited fields, at most one declared field, and no
  // non-naturally-atomic flat field, or it inherits natural atomicity from a
  // super class while declaring no field of its own.
  if ((!_layout->super_has_nonstatic_fields() && _declared_nonstatic_fields_count <= 1 && !_has_non_naturally_atomic_fields)
      || (_layout->super_has_nonstatic_fields() && _super_klass->is_naturally_atomic() && _declared_nonstatic_fields_count == 0)) {
    _is_naturally_atomic = true;
  }

  // At this point, the characteristics of the raw layout (used in standalone instances) are known.
  // From this, additional layouts will be computed: atomic and nullable layouts
  // Once those additional layouts are computed, the raw layout might need some adjustments

  bool vm_uses_flattening = UseFieldFlattening || UseArrayFlattening;

  if (!_is_abstract_value && vm_uses_flattening) { // Flat layouts are only for concrete value classes
    // Validation of the non atomic layout: only usable when the class tolerates
    // (or doesn't need) non-atomic accesses.
    if (UseNonAtomicValueFlattening && !AlwaysAtomicAccesses && (!_must_be_atomic || _is_naturally_atomic)) {
      _null_free_non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
      _null_free_non_atomic_layout_alignment = _payload_alignment;
    }

    // Next step is to compute the characteristics for a layout enabling atomic updates:
    // the size is rounded up to a power of two and must fit in a platform atomic op.
    if (UseAtomicValueFlattening) {
      int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
      if (atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
        _null_free_atomic_layout_size_in_bytes = atomic_size;
      }
    }

    // Next step is the nullable layouts: they must include a null marker
    if (UseNullableValueFlattening || UseNullableNonAtomicValueFlattening) {
      // Looking if there's an empty slot inside the layout that could be used to store a null marker
      LayoutRawBlock* b = _layout->first_field_block();
      assert(b != nullptr, "A concrete value class must have at least one (possibly dummy) field");
      int null_marker_offset = -1;
      if (_is_empty_inline_class) {
        // Reusing the dummy field as a null marker
        assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
        null_marker_offset = b->offset();
      } else {
        // Scan the payload for the first EMPTY block.
        while (b != _layout->last_block()) {
          if (b->block_kind() == LayoutRawBlock::EMPTY) {
            break;
          }
          b = b->next_block();
        }
        if (b != _layout->last_block()) {
          // found an empty slot, register its offset from the beginning of the payload
          null_marker_offset = b->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->add_field_at_offset(marker, b->offset());
        }
        if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
          int last_offset = _layout->last_block()->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->insert_field_block(_layout->last_block(), marker);
          assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
          null_marker_offset = marker->offset();
        }
      }
      assert(null_marker_offset != -1, "Sanity check");
      // Now that the null marker is there, the size of the nullable layout must be computed
      int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
      if (UseNullableNonAtomicValueFlattening) {
        _nullable_non_atomic_layout_size_in_bytes = new_raw_size;
        _null_marker_offset = null_marker_offset;
        // NOTE(review): this writes the null-free non-atomic alignment from the
        // nullable non-atomic branch — presumably the alignment is shared between
        // both non-atomic layouts; confirm this is intentional.
        _null_free_non_atomic_layout_alignment = _payload_alignment;
      }
      if (UseNullableValueFlattening) {
        // For the nullable atomic layout, the size must be compatible with the platform capabilities
        int nullable_atomic_size = round_up_power_of_2(new_raw_size);
        if (nullable_atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
          _nullable_atomic_layout_size_in_bytes = nullable_atomic_size;
          _null_marker_offset = null_marker_offset;
        }
      }
      if (_null_marker_offset == -1) { // No nullable layout has been accepted
        // If the nullable layout is rejected, the NULL_MARKER block should be removed
        // from the layout, otherwise it will appear anyway if the layout is printed
        if (!_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
        }
      }
    }
    // If the inline class has an atomic or nullable atomic layout,
    // we want the raw layout to have the same alignment as those atomic layouts so access code
    // could remain simple (single instruction without intermediate copy). This might require
    // shifting all fields in the raw layout, but this operation is possible only if the class
    // doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
    // field shift is needed but not possible, all atomic layouts are disabled and only reference
    // and loosely consistent are supported.
    int required_alignment = _payload_alignment;
    if (has_null_free_atomic_layout() && required_alignment < null_free_atomic_layout_size_in_bytes()) {
      required_alignment = null_free_atomic_layout_size_in_bytes();
    }
    if (has_nullable_atomic_layout() && required_alignment < nullable_atomic_layout_size_in_bytes()) {
      required_alignment = nullable_atomic_layout_size_in_bytes();
    }
    int shift = first_field->offset() % required_alignment;
    if (shift != 0) {
      if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
        // Shift is possible: move all fields and recompute the offsets that depend on them.
        assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possibly dummy) field");
        _layout->shift_fields(shift);
        _payload_offset = _layout->first_field_block()->offset();
        if (has_nullable_atomic_layout() || has_nullable_non_atomic_layout()) {
          assert(!_is_empty_inline_class, "Should not get here with empty values");
          _null_marker_offset = _layout->find_null_marker()->offset();
        }
        _payload_alignment = required_alignment;
      } else {
        // Shift is not possible: give up all atomic layouts.
        _null_free_atomic_layout_size_in_bytes = -1;
        if (has_nullable_atomic_layout() && !has_nullable_non_atomic_layout() && !_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
          _null_marker_offset = -1;
        }
        _nullable_atomic_layout_size_in_bytes = -1;
      }
    } else {
      _payload_alignment = required_alignment;
    }

    // If the inline class has a nullable layout, the layout used in heap allocated standalone
    // instances must also be the nullable layout, in order to be able to set the null marker to
    // non-null before copying the payload to other containers.
    if (has_nullable_atomic_layout() && payload_layout_size_in_bytes() < nullable_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_atomic_layout_size_in_bytes();
    }
    if (has_nullable_non_atomic_layout() && payload_layout_size_in_bytes() < nullable_non_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_non_atomic_layout_size_in_bytes();
    }

    // If the inline class has a null-free atomic layout, the layout used in heap allocated
    // standalone instances must have a size at least equal to the atomic layout's to allow
    // safe atomic read/write operations
    if (has_null_free_atomic_layout() && payload_layout_size_in_bytes() < null_free_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = null_free_atomic_layout_size_in_bytes();
    }
  }
  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  generate_acmp_maps();
  epilogue();
}
1322
1323 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1324 InlineKlass* vklass, int offset) {
1325 int diff = offset - vklass->payload_offset();
1326 const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1327 const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1328 while (map < last_map) {
1329 nonstatic_oop_maps->add(map->offset() + diff, map->count());
1330 map++;
1331 }
1332 }
1333
1334 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1335 if (list == nullptr) return;
1336 for (int i = 0; i < list->length(); i++) {
1337 LayoutRawBlock* f = list->at(i);
1338 if (f->block_kind() == LayoutRawBlock::FLAT) {
1339 InlineKlass* vk = f->inline_klass();
1340 assert(vk != nullptr, "Should have been initialized");
1341 if (vk->contains_oops()) {
1342 add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1343 }
1344 }
1345 }
1346 }
1347
1348 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1349 if (group->oop_fields() != nullptr) {
1350 for (int i = 0; i < group->oop_fields()->length(); i++) {
1351 LayoutRawBlock* b = group->oop_fields()->at(i);
1352 nonstatic_oop_maps->add(b->offset(), 1);
1353 }
1354 }
1355 register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1356 register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
1357 }
1358
// Inserts the segment [offset, offset+size) into 'map', a list of
// <offset, size> pairs kept sorted by offset. A segment that starts exactly
// at the end of an existing segment is coalesced into it instead of being
// inserted. 'last_idx' is a hint: the index returned by the previous call,
// used as the scan starting point when local fields are processed in
// ascending offset order (-1 means no hint). Returns the updated hint.
static int insert_segment(GrowableArray<Pair<int,int>>* map, int offset, int size, int last_idx) {
  if (map->is_empty()) {
    return map->append(Pair<int,int>(offset, size));
  }
  last_idx = last_idx == -1 ? 0 : last_idx;
  // If the hint points past the new segment, restart the scan from the beginning.
  int start = map->adr_at(last_idx)->first > offset ? 0 : last_idx;
  bool inserted = false;
  for (int c = start; c < map->length(); c++) {
    if (offset == (map->adr_at(c)->first + map->adr_at(c)->second)) {
      //contiguous to the last field, can be coalesced
      map->adr_at(c)->second = map->adr_at(c)->second + size;
      inserted = true;
      break; // break out of the for loop
    }
    if (offset < (map->adr_at(c)->first)) {
      // First segment starting after the new one: insert right before it
      // to keep the map sorted by offset.
      map->insert_before(c, Pair<int,int>(offset, size));
      last_idx = c;
      inserted = true;
      break; // break out of the for loop
    }
  }
  if (!inserted) {
    // The new segment starts after every existing one: append at the end.
    last_idx = map->append(Pair<int,int>(offset, size));
  }
  return last_idx;
}
1385
// Merges the acmp maps of class 'ik' into the maps being built, rebasing all
// offsets by (offset - payload_offset) — i.e. the position at which ik's
// payload is embedded in the current layout. The source maps are read from
// the int array stored in ik's java mirror at acmp_maps_offset(); its format
// (as consumed below) is:
//   [0]                     : number of non-oop segments N
//   [1 .. 2N]               : N pairs of (offset, size) non-oop segments
//   [2N+1 .. length-1]      : offsets of oop fields
// 'last_idx' is the insertion hint threaded through insert_segment() calls;
// the updated hint is returned.
static int insert_map_at_offset(GrowableArray<Pair<int,int>>* nonoop_map, GrowableArray<int>* oop_map,
                                const InstanceKlass* ik, int offset, int payload_offset, int last_idx) {
  oop mirror = ik->java_mirror();
  oop array = mirror->obj_field(ik->acmp_maps_offset());
  assert(array != nullptr, "Sanity check");
  typeArrayOop fmap = (typeArrayOop)array;
  // Handlize the array oop so it stays valid across the loop below.
  typeArrayHandle fmap_h(Thread::current(), fmap);
  int nb_nonoop_field = fmap_h->int_at(0);
  int field_offset = offset - payload_offset;
  // Rebase and insert each non-oop segment.
  for (int i = 0; i < nb_nonoop_field; i++) {
    last_idx = insert_segment(nonoop_map,
                              field_offset + fmap_h->int_at( i * 2 + 1),
                              fmap_h->int_at( i * 2 + 2), last_idx);
  }
  // Rebase and append each oop offset.
  int len = fmap_h->length();
  for (int i = nb_nonoop_field * 2 + 1; i < len; i++) {
    oop_map->append(field_offset + fmap_h->int_at(i));
  }
  return last_idx;
}
1406
1407 static void split_after(GrowableArray<Pair<int,int>>* map, int idx, int head) {
1408 int offset = map->adr_at(idx)->first;
1409 int size = map->adr_at(idx)->second;
1410 if (size <= head) return;
1411 map->adr_at(idx)->first = offset + head;
1412 map->adr_at(idx)->second = size - head;
1413 map->insert_before(idx, Pair<int,int>(offset, head));
1414
1415 }
1416
// Builds the acmp maps of the class: a sorted list of <offset, size> non-oop
// memory segments (_nonoop_acmp_map) and a list of oop field offsets
// (_oop_acmp_map). Inherited fields are merged in from the super class' maps,
// local fields are added from the layout blocks, and finally the non-oop
// segments are split so that each piece is naturally aligned for its size
// (8/4/2/1-byte chunks), based on offset modulo 8.
void FieldLayoutBuilder::generate_acmp_maps() {
  assert(_is_inline_type || _is_abstract_value, "Must be done only for value classes (abstract or not)");

  // create/initialize current class' maps
  // The Pair<int,int> values in the nonoop_acmp_map represent <offset,size> segments of memory
  _nonoop_acmp_map = new GrowableArray<Pair<int,int>>();
  _oop_acmp_map = new GrowableArray<int>();
  // Empty value classes have no substantive content to compare: leave both maps empty.
  if (_is_empty_inline_class) return;
  // last_idx remembers the position of the last insertion in order to speed up the next insertion.
  // Local fields are processed in ascending offset order, so an insertion is very likely to be performed
  // next to the previous insertion. However, in some cases local fields and inherited fields can be
  // interleaved, in which case the search of the insertion position cannot depend on the previous insertion.
  int last_idx = -1;
  if (_super_klass != nullptr && _super_klass != vmClasses::Object_klass()) { // Assumes j.l.Object cannot have fields
    // Merge the super class' acmp maps (inherited fields) with no rebasing.
    last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, _super_klass, 0, 0, last_idx);
  }

  // Processing local fields
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    switch(b->block_kind()) {
      case LayoutRawBlock::RESERVED:
      case LayoutRawBlock::EMPTY:
      case LayoutRawBlock::PADDING:
      case LayoutRawBlock::NULL_MARKER:
      case LayoutRawBlock::INHERITED: // inherited fields are handled during maps creation/initialization
        // skip
        break;

      case LayoutRawBlock::REGULAR:
      {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        // Signature starting with 'L' (object) or '[' (array) means an oop field.
        if (fi->signature(_constant_pool)->starts_with("L") || fi->signature(_constant_pool)->starts_with("[")) {
          _oop_acmp_map->append(b->offset());
        } else {
          // Non-oop case
          last_idx = insert_segment(_nonoop_acmp_map, b->offset(), b->size(), last_idx);
        }
        break;
      }
      case LayoutRawBlock::FLAT:
      {
        // Merge the flat field's own acmp maps, rebased to this field's offset.
        InlineKlass* vk = b->inline_klass();
        last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, vk, b->offset(), vk->payload_offset(), last_idx);
        if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
          int null_marker_offset = b->offset() + vk->null_marker_offset_in_payload();
          last_idx = insert_segment(_nonoop_acmp_map, null_marker_offset, 1, last_idx);
          // Important note: the implementation assumes that for nullable flat fields, if the
          // null marker is zero (field is null), then all the fields of the flat field are also
          // zeroed. So, nullable flat fields are not encoded differently than null-free flat fields,
          // all fields are included in the map, plus the null marker
          // If it happens that the assumption above is wrong, then nullable flat fields would
          // require a dedicated section in the acmp map, and be handled differently: null_marker
          // comparison first, and if null markers are identical and non-zero, then conditional
          // comparison of the other fields
        }
      }
      break;

    }
    b = b->next_block();
  }

  // split segments into well-aligned blocks: each resulting piece starts at an
  // offset whose alignment matches the chunk size carved off (8, 4, 2, or 1).
  int idx = 0;
  while (idx < _nonoop_acmp_map->length()) {
    int offset = _nonoop_acmp_map->adr_at(idx)->first;
    int size = _nonoop_acmp_map->adr_at(idx)->second;
    int mod = offset % 8;
    switch (mod) {
      case 0:
        // Already 8-byte aligned: keep as is.
        break;
      case 4:
        split_after(_nonoop_acmp_map, idx, 4);
        break;
      case 2:
      case 6:
        split_after(_nonoop_acmp_map, idx, 2);
        break;
      case 1:
      case 3:
      case 5:
      case 7:
        split_after(_nonoop_acmp_map, idx, 1);
        break;
      default:
        ShouldNotReachHere();
    }
    idx++;
  }
}
1508
// Finalizes layout computation: builds the non-static oop maps, copies all
// sizing/offset results into _info (consumed by InstanceKlass creation), and
// optionally prints the layout for diagnostics.
// NOTE: the diagnostic output format below is parsed by layout-integrity tests
// (see the ASSERT section comment), so its exact shape matters.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(_nonstatic_oopmap_count);
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  if (super_oop_map_count > 0) {
    // Inherited super-class oop map blocks are installed first, before this
    // class' own oop fields are registered.
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
                                                    _super_klass->nonstatic_oop_map_count());
  }
  // Register oops of the root (non-contended) field group, then of every
  // contended group that actually contains oop fields.
  register_embedded_oops(nonstatic_oop_maps, _root_group);
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        register_embedded_oops(nonstatic_oop_maps, cg);
      }
    }
  }
  // Merge adjacent/overlapping blocks into the minimal set of oop maps.
  nonstatic_oop_maps->compact();

  // Instance and static sizes are derived from the end offset of the last
  // allocated layout block, rounded up to the relevant granularity.
  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
                            InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  // Non-static field size is expressed in heap-oop-size units, measured from
  // the end of the object header.
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;
  _info->_has_inlined_fields = _has_inlined_fields;
  _info->_is_naturally_atomic = _is_naturally_atomic;
  // The flat-layout variants (payload, null-free/nullable x atomic/non-atomic)
  // only exist for concrete inline (value) types.
  if (_is_inline_type) {
    _info->_must_be_atomic = _must_be_atomic;
    _info->_payload_alignment = _payload_alignment;
    _info->_payload_offset = _payload_offset;
    _info->_payload_size_in_bytes = _payload_size_in_bytes;
    _info->_null_free_non_atomic_size_in_bytes = _null_free_non_atomic_layout_size_in_bytes;
    _info->_null_free_non_atomic_alignment = _null_free_non_atomic_layout_alignment;
    _info->_null_free_atomic_layout_size_in_bytes = _null_free_atomic_layout_size_in_bytes;
    _info->_nullable_atomic_layout_size_in_bytes = _nullable_atomic_layout_size_in_bytes;
    _info->_nullable_non_atomic_layout_size_in_bytes = _nullable_non_atomic_layout_size_in_bytes;
    _info->_null_marker_offset = _null_marker_offset;
    _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
    _info->_is_empty_inline_klass = _is_empty_inline_class;
  }

  // Acmp maps are needed for both concrete and abstract value classes
  if (_is_inline_type || _is_abstract_value) {
    _info->_acmp_maps_offset = _static_layout->acmp_maps_offset();
    _info->_nonoop_acmp_map = _nonoop_acmp_map;
    _info->_oop_acmp_map = _oop_acmp_map;
  }

  // This may be too restrictive, since if all the fields fit in 64
  // bits we could make the decision to align instances of this class
  // to 64-bit boundaries, and load and store them as single words.
  // And on machines which supported larger atomics we could similarly
  // allow larger values to be atomic, if properly aligned.

#ifdef ASSERT
  // Tests verifying integrity of field layouts are using the output of -XX:+PrintFieldLayout
  // which prints the details of LayoutRawBlocks used to compute the layout.
  // The code below checks that offsets in the _field_info meta-data match offsets
  // in the LayoutRawBlocks
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
        // Print the mismatch before asserting so the failure is diagnosable.
        tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
      }
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
  // Same consistency check for the static field layout.
  b = _static_layout->blocks();
  while(b != _static_layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
#endif // ASSERT

  // Function-local static: the format banner is printed only once per VM run,
  // before the first layout dump.
  static bool first_layout_print = true;


  if (PrintFieldLayout || (PrintInlineLayout && (_has_inlineable_fields || _is_inline_type || _is_abstract_value))) {
    ResourceMark rm;
    // Buffer the whole dump in a stringStream so it is emitted atomically
    // (see print_raw at the end), avoiding interleaving with other threads.
    stringStream st;
    if (first_layout_print) {
      st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
      st.print_cr("Heap oop size = %d", heapOopSize);
      first_layout_print = false;
    }
    if (_super_klass != nullptr) {
      st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
                  _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
    } else {
      st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
    }
    st.print_cr("Instance fields:");
    // An empty inline class still has a dummy field slot; if a null marker was
    // assigned, that slot is reused for it and the printer flags it as such.
    const bool dummy_field_is_reused_as_null_marker = _is_empty_inline_class && _null_marker_offset != -1;
    _layout->print(&st, false, _super_klass, _inline_layout_info_array, dummy_field_is_reused_as_null_marker);
    st.print_cr("Static fields:");
    _static_layout->print(&st, true, nullptr, _inline_layout_info_array, false);
    st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    if (_is_inline_type) {
      st.print_cr("First field offset = %d", _payload_offset);
      // Each supported flat layout prints "size/alignment", or "-/-" when the
      // layout variant does not exist for this class.
      st.print_cr("%s layout: %d/%d", LayoutKindHelper::layout_kind_as_string(LayoutKind::BUFFERED),
                  _payload_size_in_bytes, _payload_alignment);
      if (has_null_free_non_atomic_flat_layout()) {
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT),
                    _null_free_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT));
      }
      if (has_null_free_atomic_layout()) {
        // NOTE(review): size is printed for both the size and alignment slots —
        // presumably atomic flat layouts have alignment == size by construction;
        // confirm, otherwise this should print a dedicated alignment value.
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT),
                    _null_free_atomic_layout_size_in_bytes, _null_free_atomic_layout_size_in_bytes);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT));
      }
      if (has_nullable_atomic_layout()) {
        // NOTE(review): same size-as-alignment pattern as above — confirm intended.
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT),
                    _nullable_atomic_layout_size_in_bytes, _nullable_atomic_layout_size_in_bytes);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT));
      }
      if (has_nullable_non_atomic_layout()) {
        // NOTE(review): alignment printed is the null-free non-atomic one —
        // presumably the nullable non-atomic layout shares it (it is the
        // null-free layout plus a null marker); confirm.
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT),
                    _nullable_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT));
      }
      if (_null_marker_offset != -1) {
        st.print_cr("Null marker offset = %d", _null_marker_offset);
      }
      // Dump the acmp maps: non-oop entries are <offset,size> segments, oop
      // entries are bare offsets.
      st.print("Non-oop acmp map: ");
      for (int i = 0 ; i < _nonoop_acmp_map->length(); i++) {
        st.print("<%d,%d>, ", _nonoop_acmp_map->at(i).first, _nonoop_acmp_map->at(i).second);
      }
      st.print_cr("");
      st.print("oop acmp map: ");
      for (int i = 0 ; i < _oop_acmp_map->length(); i++) {
        st.print("%d, ", _oop_acmp_map->at(i));
      }
      st.print_cr("");
    }
    st.print_cr("---");
    // Print output all together.
    tty->print_raw(st.as_string());
  }
}
1675
1676 void FieldLayoutBuilder::build_layout() {
1677 if (_is_inline_type || _is_abstract_value) {
1678 compute_inline_class_layout();
1679 } else {
1680 compute_regular_layout();
1681 }
1682 }