1 /*
2 * Copyright (c) 2020, 2026, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classFileParser.hpp"
26 #include "classfile/fieldLayoutBuilder.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "jvm.h"
30 #include "memory/resourceArea.hpp"
31 #include "oops/array.hpp"
32 #include "oops/fieldStreams.inline.hpp"
33 #include "oops/inlineKlass.inline.hpp"
34 #include "oops/instanceKlass.inline.hpp"
35 #include "oops/instanceMirrorKlass.hpp"
36 #include "oops/klass.inline.hpp"
37 #include "runtime/fieldDescriptor.inline.hpp"
38 #include "utilities/powerOfTwo.hpp"
39
// Selects the layout kind (flat or reference) to be used for the field described by
// field_info. inline_layout_info_array holds, for fields whose type resolved to a
// known value class, the InlineKlass and layout data gathered during class file
// parsing (the array may be null). Returns LayoutKind::REFERENCE whenever flattening
// is disabled, not applicable, or unsafe; otherwise returns the flat layout compatible
// with the field's atomicity and nullability requirements.
static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
                                         bool can_use_atomic_flat) {

  // The can_use_atomic_flat argument indicates if an atomic flat layout can be used for this field.
  // This argument will be false if the container is a loosely consistent value class. Using an atomic layout
  // in a container that has no atomicity guarantee creates a risk to see this field's value be subject to
  // tearing even if the field's class was declared atomic (non loosely consistent).

  if (!UseFieldFlattening) {
    return LayoutKind::REFERENCE;
  }

  if (field_info.field_flags().is_injected()) {
    // don't flatten injected fields
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_volatile()) {
    // volatile is used as a keyword to prevent flattening
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_static()) {
    assert(inline_layout_info_array == nullptr ||
           inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr,
           "Static fields do not have inline layout info");
    // don't flatten static fields
    return LayoutKind::REFERENCE;
  }

  if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
    // field's type is not a known value class, using a reference
    return LayoutKind::REFERENCE;
  }

  InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
  InlineKlass* vk = inline_field_info->klass();

  if (field_info.field_flags().is_null_free_inline_type()) {
    assert(field_info.access_flags().is_strict(), "null-free fields must be strict");
    if (vk->must_be_atomic() || AlwaysAtomicAccesses) {
      // A naturally atomic value can use the non-atomic layout even when atomicity is required
      if (vk->is_naturally_atomic() && vk->has_null_free_non_atomic_layout()) return LayoutKind::NULL_FREE_NON_ATOMIC_FLAT;
      return (vk->has_null_free_atomic_layout() && can_use_atomic_flat) ? LayoutKind::NULL_FREE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return vk->has_null_free_non_atomic_layout() ? LayoutKind::NULL_FREE_NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
    }
  } else {
    // To preserve the consistency between the null-marker and the field content, the NULLABLE_NON_ATOMIC_FLAT
    // can only be used in containers that have atomicity guarantees (can_use_atomic_flat argument set to true)
    if (field_info.access_flags().is_strict() && field_info.access_flags().is_final() && can_use_atomic_flat) {
      if (vk->has_nullable_non_atomic_layout()) return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    // Another special case where NULLABLE_NON_ATOMIC_FLAT can be used: nullable empty values, because the
    // payload of those values contains only the null-marker
    if (vk->is_empty_inline_type() && vk->has_nullable_non_atomic_layout()) {
      return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    if (UseNullableValueFlattening && vk->has_nullable_atomic_layout()) {
      return can_use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return LayoutKind::REFERENCE;
    }
  }
}
104
105 static bool field_is_inlineable(FieldInfo fieldinfo, LayoutKind lk, Array<InlineLayoutInfo>* ili) {
106 if (fieldinfo.field_flags().is_null_free_inline_type()) {
107 // A null-free inline type is always inlineable
108 return true;
109 }
110
111 if (lk != LayoutKind::REFERENCE) {
112 assert(lk != LayoutKind::BUFFERED, "Sanity check");
113 assert(lk != LayoutKind::UNKNOWN, "Sanity check");
114 // We've chosen a layout that isn't a normal reference
115 return true;
116 }
117
118 const int field_index = (int)fieldinfo.index();
119 if (!fieldinfo.field_flags().is_injected() &&
120 ili != nullptr &&
121 ili->adr_at(field_index)->klass() != nullptr &&
122 !ili->adr_at(field_index)->klass()->is_identity_class() &&
123 !ili->adr_at(field_index)->klass()->is_abstract()) {
124 // The field's klass is not an identity class or abstract
125 return true;
126 }
127
128 return false;
129 }
130
// Returns, through the size and alignment out-parameters, the byte size and the
// alignment requirement of the flat layout 'kind' of the value class vk.
// Fatal if kind is not one of the four flat layout kinds.
static void get_size_and_alignment(InlineKlass* vk, LayoutKind kind, int* size, int* alignment) {
  switch(kind) {
    case LayoutKind::NULL_FREE_NON_ATOMIC_FLAT:
      *size = vk->null_free_non_atomic_size_in_bytes();
      *alignment = vk->null_free_non_atomic_alignment();
      break;
    case LayoutKind::NULL_FREE_ATOMIC_FLAT:
      // Atomic layouts use alignment == size
      *size = vk->null_free_atomic_size_in_bytes();
      *alignment = *size;
      break;
    case LayoutKind::NULLABLE_ATOMIC_FLAT:
      *size = vk->nullable_atomic_size_in_bytes();
      *alignment = *size;
      break;
    case LayoutKind::NULLABLE_NON_ATOMIC_FLAT:
      *size = vk->nullable_non_atomic_size_in_bytes();
      // NOTE(review): the size comes from the nullable layout but the alignment from
      // the null-free one — confirm this is intentional and not a copy-paste slip.
      *alignment = vk->null_free_non_atomic_alignment();
      break;
    default:
      ShouldNotReachHere();
  }
}
153
// Constructor for blocks that are not tied to a declared field (EMPTY, RESERVED,
// PADDING, INHERITED and NULL_MARKER blocks): no field index, no inline klass,
// and a default alignment of a single byte. The offset is set later, when the
// block is placed in a layout.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN),
  _offset(-1),
  _alignment(1),
  _size(size),
  _field_index(-1) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
168
169
// Constructor for blocks that describe a field (REGULAR, FLAT and INHERITED
// blocks): records the index of the field in the FieldInfo array and its
// alignment requirement. The offset is set later, when the block is placed
// in a layout.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN),
  _offset(-1),
  _alignment(alignment),
  _size(size),
  _field_index(index) {
  assert(kind == REGULAR || kind == FLAT || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
185
186 bool LayoutRawBlock::fit(int size, int alignment) {
187 int adjustment = 0;
188 if ((_offset % alignment) != 0) {
189 adjustment = alignment - (_offset % alignment);
190 }
191 return _size >= size + adjustment;
192 }
193
// A FieldGroup gathers fields that are laid out together: either the root group
// of non-contended fields, or one @Contended group. The field lists are lazily
// allocated by the add_* methods.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _small_primitive_fields(nullptr),
  _big_primitive_fields(nullptr),
  _oop_fields(nullptr),
  _contended_group(contended_group), // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
201
202 void FieldGroup::add_primitive_field(int idx, BasicType type) {
203 int size = type2aelembytes(type);
204 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
205 if (size >= oopSize) {
206 add_to_big_primitive_list(block);
207 } else {
208 add_to_small_primitive_list(block);
209 }
210 }
211
212 void FieldGroup::add_oop_field(int idx) {
213 int size = type2aelembytes(T_OBJECT);
214 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
215 if (_oop_fields == nullptr) {
216 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
217 }
218 _oop_fields->append(block);
219 _oop_count++;
220 }
221
222 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk, int size, int alignment) {
223 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
224 block->set_inline_klass(vk);
225 block->set_layout_kind(lk);
226 if (block->size() >= oopSize) {
227 add_to_big_primitive_list(block);
228 } else {
229 add_to_small_primitive_list(block);
230 }
231 }
232
233 void FieldGroup::sort_by_size() {
234 if (_small_primitive_fields != nullptr) {
235 _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
236 }
237 if (_big_primitive_fields != nullptr) {
238 _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
239 }
240 }
241
242 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
243 if (_small_primitive_fields == nullptr) {
244 _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
245 }
246 _small_primitive_fields->append(block);
247 }
248
249 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
250 if (_big_primitive_fields == nullptr) {
251 _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
252 }
253 _big_primitive_fields->append(block);
254 }
255
// A FieldLayout is a doubly-linked list of LayoutRawBlocks describing, in offset
// order, the content of an instance or of a class's static storage: reserved
// header space, fields, empty slots and padding. _blocks is the head of the list,
// _last its tail, and _start the first block from which new fields may be
// allocated. All offsets start at their -1/false sentinel values and are filled
// in while the layout is built.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
  _field_info(field_info),
  _inline_layout_info_array(inline_layout_info_array),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),
  _last(_blocks),
  _super_first_field_offset(-1),
  _super_alignment(-1),
  _super_min_align_required(-1),
  _null_reset_value_offset(-1),
  _acmp_maps_offset(-1),
  _super_has_nonstatic_fields(false),
  _has_inherited_fields(false) {}
270
// Sets up the layout used for static fields: a single EMPTY block of "infinite"
// (INT_MAX) size, preceded by a RESERVED block covering the java.lang.Class
// header when its size is already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
285
// Sets up the layout of an instance: either a fresh layout with a RESERVED block
// for the object header (no super class), or a reconstruction of the super
// class's layout into which this class's fields will be inserted.
// super_ends_with_oop is set to true when the inherited field at the highest
// offset is an oop.
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
  if (super_klass == nullptr) {
    super_ends_with_oop = false;
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    reconstruct_layout(super_klass, _super_has_nonstatic_fields, super_ends_with_oop);
    fill_holes(super_klass);
    if ((!super_klass->has_contended_annotations()) || !_super_has_nonstatic_fields) {
      _start = _blocks; // start allocating fields from the first empty block
    } else {
      _start = _last; // append fields at the end of the reconstructed layout
    }
  }
}
304
305 LayoutRawBlock* FieldLayout::first_field_block() {
306 LayoutRawBlock* block = _blocks;
307 while (block != nullptr
308 && block->block_kind() != LayoutRawBlock::INHERITED
309 && block->block_kind() != LayoutRawBlock::REGULAR
310 && block->block_kind() != LayoutRawBlock::FLAT
311 && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
312 block = block->next_block();
313 }
314 return block;
315 }
316
// Insert a set of fields into a layout.
// For each field, search for an empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Cache of the previous field's requirements and search outcome, used to skip
  // a search that is guaranteed to fail again
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;

      // Scan backward from the end, keeping the smallest fitting empty block (best fit)
      while (cursor != start) {
        if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No fitting slot found: fall back to appending at the end of the layout
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
369
// Used for classes with hard coded field offsets, insert a field at the specified offset
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  // Walk the layout looking for the empty block that contains the requested
  // offset; the trailing _last block catches offsets past the current end
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Preserve the gap before the requested offset as a separate empty block
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        remove(slot);
      }
      if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
        _field_info->adr_at(block->field_index())->set_offset(block->offset());
      }
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
401
// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = nullptr;
  if (start == last_block()) {
    candidate = last_block();
  } else {
    // Scan backward from the end for an empty block able to hold the whole set,
    // aligned for the first field; fall back to appending at the end of the layout
    LayoutRawBlock* first = list->at(0);
    candidate = last_block()->prev_block();
    while (candidate->block_kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != nullptr, "Candidate must not be null");
    assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
441
442 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
443 assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
444 if (slot->offset() % block->alignment() != 0) {
445 int adjustment = block->alignment() - (slot->offset() % block->alignment());
446 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
447 insert(slot, adj);
448 }
449 assert(block->size() >= block->size(), "Enough space must remain after adjustment");
450 insert(slot, block);
451 if (slot->size() == 0) {
452 remove(slot);
453 }
454 // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
455 if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
456 _field_info->adr_at(block->field_index())->set_offset(block->offset());
457 if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
458 _null_reset_value_offset = block->offset();
459 }
460 if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::acmp_maps_name()) {
461 _acmp_maps_offset = block->offset();
462 }
463 }
464 if (LayoutKindHelper::is_nullable_flat(block->layout_kind())) {
465 int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->payload_offset() + block->offset();
466 _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
467 _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
468 }
469
470 return block;
471 }
472
// Rebuilds in this layout the list of non-static fields inherited from ik and
// all of its super classes, sorted by offset and preceded by a RESERVED block
// for the object header. Also gathers summary data about the inherited layout:
// _super_first_field_offset, _super_min_align_required, _has_inherited_fields,
// has_nonstatic_fields, and whether the inherited field at the highest offset
// is an oop (ends_with_oop).
void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_nonstatic_fields, bool& ends_with_oop) {
  has_nonstatic_fields = ends_with_oop = false;
  if (ik->is_abstract() && !ik->is_identity_class()) {
    // assumes a conservative (T_LONG) alignment for abstract value supers — TODO confirm rationale
    _super_alignment = type2aelembytes(BasicType::T_LONG);
  }
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  BasicType last_type; // only read when last_offset > 0, i.e. after being assigned below
  int last_offset = -1;
  while (ik != nullptr) {
    for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // distinction between static and non-static fields is missing
      if (fs.access_flags().is_static()) continue;
      has_nonstatic_fields = true;
      _has_inherited_fields = true;
      if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) {
        _super_first_field_offset = fs.offset();
      }
      LayoutRawBlock* block;
      if (fs.is_flat()) {
        InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
        InlineKlass* vk = layout_info.klass();
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
                                   vk->layout_size_in_bytes(layout_info.kind()),
                                   vk->layout_alignment(layout_info.kind()));
        assert(_super_alignment == -1 || _super_alignment >= vk->payload_alignment(), "Invalid value alignment");
        _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
      } else {
        int size = type2aelembytes(type);
        // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
        // For primitive types, the alignment is equal to the size
        assert(_super_alignment == -1 || _super_alignment >= size, "Invalid value alignment");
        _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
      }
      // Track the field at the highest offset, used to compute ends_with_oop below
      if (fs.offset() > last_offset) {
        last_offset = fs.offset();
        last_type = type;
      }
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    // NOTE(review): this ternary is equivalent to plain 'ik->super()'; confirm whether a
    // different accessor was intended on the non-null branch (e.g. java_super()).
    ik = ik->super() == nullptr ? nullptr : ik->super();
  }
  assert(last_offset == -1 || last_offset > 0, "Sanity");
  if (last_offset > 0 &&
      (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
    ends_with_oop = true;
  }

  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;
  // Chain the sorted inherited field blocks after the header block
  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
}
535
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      // Gap between consecutive blocks: fill it with an EMPTY (or PADDING) block
      int size = b->next_block()->offset() - (b->offset() + b->size());
      // FIXME it would be better if initial empty block where tagged as PADDING for value classes
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Append the trailing "infinite" EMPTY block into which new fields are allocated
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
580
// Inserts block at the beginning of the EMPTY block slot: block takes over the
// start of the slot and the slot is shrunk accordingly (removing a slot whose
// size drops to zero is the caller's responsibility). The list head (_blocks)
// and the allocation start (_start) are redirected when they pointed at the
// slot. Returns the inserted block.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Link block into the list just before slot
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  if (_blocks == slot) {
    _blocks = block;
  }
  if (_start == slot) {
    _start = block;
  }
  return block;
}
603
// Unlinks block from the layout's doubly-linked list. The block itself is not
// freed, so callers may still read its offset and size afterwards. The trailing
// _last block must never be removed; _start is moved back to the previous block
// when it pointed at the removed one.
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    // next_block() is non-null here because block != _last (the list tail)
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    _start = block->prev_block();
  }
}
621
// Shifts every field block (and all following blocks) up by 'shift' bytes,
// either by growing the EMPTY block that precedes the first field or by
// inserting a PADDING block there. Recorded field offsets and null-marker
// offsets are updated to match the new positions.
void FieldLayout::shift_fields(int shift) {
  LayoutRawBlock* b = first_field_block();
  LayoutRawBlock* previous = b->prev_block();
  if (previous->block_kind() == LayoutRawBlock::EMPTY) {
    previous->set_size(previous->size() + shift);
  } else {
    // No empty space before the first field: insert a padding block of 'shift' bytes
    LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
    nb->set_offset(b->offset());
    previous->set_next_block(nb);
    nb->set_prev_block(previous);
    b->set_prev_block(nb);
    nb->set_next_block(b);
  }
  while (b != nullptr) {
    b->set_offset(b->offset() + shift);
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      _field_info->adr_at(b->field_index())->set_offset(b->offset());
      if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
        // Keep the null-marker offset consistent with the shifted field offset
        int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
        _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
        _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
      }
    }
    assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
    b = b->next_block();
  }
}
649
650 LayoutRawBlock* FieldLayout::find_null_marker() {
651 LayoutRawBlock* b = _blocks;
652 while (b != nullptr) {
653 if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
654 return b;
655 }
656 b = b->next_block();
657 }
658 ShouldNotReachHere();
659 }
660
// Removes the NULL_MARKER block from the layout: when the following block is
// EMPTY, the two are merged into a single empty slot; otherwise the marker
// block is simply retyped as EMPTY. Fatal if no null marker block is found.
void FieldLayout::remove_null_marker() {
  LayoutRawBlock* b = first_field_block();
  while (b != nullptr) {
    if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
      if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
        // remove() only unlinks b, so reading its offset/size afterwards is safe
        LayoutRawBlock* n = b->next_block();
        remove(b);
        n->set_offset(b->offset());
        n->set_size(n->size() + b->size());
      } else {
        b->set_block_kind(LayoutRawBlock::EMPTY);
      }
      return;
    }
    b = b->next_block();
  }
  ShouldNotReachHere(); // if we reach this point, the null marker was not found!
}
679
// Prints a human readable description of the layout, one line per block.
// 'inline_fields' is needed to resolve the klass of FLAT blocks, 'super' to
// resolve the names of INHERITED blocks. When dummy_field_is_reused_as_null_marker
// is true, fields whose name matches the empty-marker symbol are annotated as
// being reused as the null marker.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields, bool dummy_field_is_reused_as_null_marker) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->block_kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print(" @%d %s %d/%d \"%s\" %s",
                      b->offset(),
                      "REGULAR",
                      b->size(),
                      b->alignment(),
                      fi->name(_cp)->as_C_string(),
                      fi->signature(_cp)->as_C_string());

        if (dummy_field_is_reused_as_null_marker) {
          const bool is_dummy_field = fi->name(_cp)->fast_compare(vmSymbols::symbol_at(VM_SYMBOL_ENUM_NAME(empty_marker_name))) == 0;
          if (is_dummy_field) {
            output->print(" (reused as null-marker)");
          }
        }

        output->cr();
        break;
      }
      case LayoutRawBlock::FLAT: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
        assert(ik != nullptr, "");
        output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
                         b->offset(),
                         "FLAT",
                         b->size(),
                         b->alignment(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         ik->name()->as_C_string(),
                         ik->class_loader_data(),
                         LayoutKindHelper::layout_kind_as_string(b->layout_kind()));
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %s %d/-",
                         b->offset(),
                         "RESERVED",
                         b->size());
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        // Walk up the super class chain to find the field declared at this offset
        bool found = false;
        const InstanceKlass* ik = super;
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
              output->print_cr(" @%d %s %d/%d \"%s\" %s",
                               b->offset(),
                               "INHERITED",
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla => FIXME
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string());
              found = true;
              break;
            }
          }
          ik = ik->super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "EMPTY",
                         b->size());
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "PADDING",
                         b->size());
        break;
      case LayoutRawBlock::NULL_MARKER:
      {
        output->print_cr(" @%d %s %d/1 ",
                         b->offset(),
                         "NULL_MARKER",
                         b->size());
        break;
      }
      default:
        fatal("Unknown block type");
    }
    b = b->next_block();
  }
}
777
// FieldLayoutBuilder computes the layout of the static and instance fields of
// the class being loaded. All computed values (offsets, sizes, alignments,
// flags) start at their -1/0/false sentinel values here and are filled in as
// the layout is built; the results are published through the 'info',
// 'field_info' and 'inline_layout_info_array' arguments.
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type,bool is_abstract_value,
                                       bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
  _classname(classname),
  _loader_data(loader_data),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _inline_layout_info_array(inline_layout_info_array),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _payload_alignment(-1),
  _payload_offset(-1),
  _null_marker_offset(-1),
  _payload_size_in_bytes(-1),
  _null_free_non_atomic_layout_size_in_bytes(-1),
  _null_free_non_atomic_layout_alignment(-1),
  _null_free_atomic_layout_size_in_bytes(-1),
  _nullable_atomic_layout_size_in_bytes(-1),
  _nullable_non_atomic_layout_size_in_bytes(-1),
  _fields_size_sum(0),
  _declared_nonstatic_fields_count(0),
  _has_non_naturally_atomic_fields(false),
  _is_naturally_atomic(false),
  _must_be_atomic(must_be_atomic),
  _has_nonstatic_fields(false),
  _has_inlineable_fields(false),
  _has_inlined_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _is_abstract_value(is_abstract_value),
  _is_empty_inline_class(false) {}
815
816 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
817 assert(g > 0, "must only be called for named contended groups");
818 FieldGroup* fg = nullptr;
819 for (int i = 0; i < _contended_groups.length(); i++) {
820 fg = _contended_groups.at(i);
821 if (fg->contended_group() == g) return fg;
822 }
823 fg = new FieldGroup(g);
824 _contended_groups.append(fg);
825 return fg;
826 }
827
// Creates the instance and static FieldLayouts and the FieldGroups that the
// sorting passes will populate, and seeds the oop map count and the
// "has nonstatic fields" flag from the super class.
void FieldLayoutBuilder::prologue() {
  _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
  const InstanceKlass* super_klass = _super_klass;
  // NOTE(review): _super_ends_with_oop is not initialized in the constructor's
  // initializer list visible above — presumably it is an out-parameter filled
  // in by initialize_instance_layout(); confirm against fieldLayoutBuilder.hpp.
  _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
  _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
  if (super_klass != nullptr) {
    _has_nonstatic_fields = super_klass->has_nonstatic_fields();
  }
  _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
  _static_layout->initialize_static_layout();
  _static_fields = new FieldGroup();
  _root_group = new FieldGroup();
}
841
// Field sorting for regular (non-inline) classes:
// - fields are sorted in static and non-static fields
// - non-static fields are also sorted according to their contention group
//   (support of the @Contended annotation)
// - @Contended annotation is ignored for static fields
// - field flattening decisions are taken in this method
void FieldLayoutBuilder::regular_field_sorting() {
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    // First, pick the group the field belongs to: static, contended (named or
    // anonymous), or the root group for plain instance fields.
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      if (fieldinfo.field_flags().is_contended()) {
        int g = fieldinfo.contended_group();
        if (g == 0) {
          // Anonymous contended group: each such field gets its own group
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        group->add_primitive_field(idx, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        // A regular (non-value) container can always use atomic flat layouts,
        // hence can_use_atomic_flat == true here.
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);

        if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
          _has_inlineable_fields = true;
        }

        if (lk == LayoutKind::REFERENCE) {
          if (group != _static_fields) _nonstatic_oopmap_count++;
          group->add_oop_field(idx);
        } else {
          // Field is flattened: record its value class, layout kind, size and
          // alignment in the group and in the field metadata.
          assert(group != _static_fields, "Static fields are not flattened");
          assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
                 "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

          const int field_index = (int)fieldinfo.index();
          assert(_inline_layout_info_array != nullptr, "Array must have been created");
          assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
          _has_inlined_fields = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
          int size, alignment;
          get_size_and_alignment(vk, lk, &size, &alignment);
          group->add_flat_field(idx, vk, lk, size, alignment);
          _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
          // The embedded value class' oop maps will be merged into this class' maps
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
          // no need to update _must_be_atomic if vk->must_be_atomic() is true because current class is not an inline class
        }
        break;
      }
      default:
        fatal("Something wrong?");
    }
  }
  // Sorting by (decreasing) size helps minimize padding holes during allocation
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}
927
/* Field sorting for inline classes:
 * - because inline classes are immutable, the @Contended annotation is ignored
 *   when computing their layout (with only read operation, there's no false
 *   sharing issue)
 * - this method also records the alignment of the field with the most
 *   constraining alignment, this value is then used as the alignment
 *   constraint when flattening this inline type into another container
 * - field flattening decisions are taken in this method (those decisions are
 *   currently only based on the size of the fields to be flattened, the size
 *   of the resulting instance is not considered)
 */
void FieldLayoutBuilder::inline_class_field_sorting() {
  assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
  // Accumulates the most constraining alignment seen across non-static fields;
  // becomes _payload_alignment at the end (-1 if no non-static field).
  int alignment = -1;
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    int field_alignment = 1;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      _declared_nonstatic_fields_count++;
      // @Contended is ignored for value classes: everything goes to the root group
      group = _root_group;
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        if (group != _static_fields) {
          field_alignment = type2aelembytes(type); // alignment == size for primitive types
        }
        group->add_primitive_field(idx, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);

        if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
          _has_inlineable_fields = true;
        }

        if (lk == LayoutKind::REFERENCE) {
          if (group != _static_fields) {
            _nonstatic_oopmap_count++;
            field_alignment = type2aelembytes(type); // alignment == size for oops
          }
          group->add_oop_field(idx);
        } else {
          // Field is flattened: record its value class, layout kind, size and
          // alignment in the group and in the field metadata.
          assert(group != _static_fields, "Static fields are not flattened");
          assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
                 "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

          const int field_index = (int)fieldinfo.index();
          assert(_inline_layout_info_array != nullptr, "Array must have been created");
          assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
          _has_inlined_fields = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
          if (!vk->is_naturally_atomic()) _has_non_naturally_atomic_fields = true;
          // Note: this 'alignment' shadows the method-level accumulator declared
          // above; its value reaches the accumulator through field_alignment below.
          int size, alignment;
          get_size_and_alignment(vk, lk, &size, &alignment);
          group->add_flat_field(idx, vk, lk, size, alignment);
          _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          field_alignment = alignment;
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
        }
        break;
      }
      default:
        fatal("Unexpected BasicType");
    }
    if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
  }
  _payload_alignment = alignment;
  assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
}
1016
1017 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
1018 if (ContendedPaddingWidth > 0) {
1019 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
1020 _layout->insert(slot, padding);
1021 }
1022 }
1023
// Computation of regular classes layout is an evolution of the previous default layout
// (FieldAllocationStyle 1):
//   - primitive fields (both primitive types and flat inline types) are allocated
//     first (from the biggest to the smallest)
//   - oop fields are allocated, either in existing gaps or at the end of
//     the layout. We allocate oops in a single block to have a single oop map entry.
//   - if the super class ended with an oop, we lead with oops. That will cause the
//     trailing oop map entry of the super class and the oop map entry of this class
//     to be folded into a single entry later. Correspondingly, if the super class
//     ends with a primitive field, we gain nothing by leading with oops; therefore
//     we let oop fields trail, thus giving future derived classes the chance to apply
//     the same trick.
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();
  if (_is_contended) {
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }

  // Allocate the root group: oops lead or trail depending on how the super
  // class layout ends (see the oop-map folding explanation above).
  if (_super_ends_with_oop) {
    _layout->add(_root_group->oop_fields());
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
  } else {
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
    _layout->add(_root_group->oop_fields());
  }

  // Each contended group is preceded by padding to isolate it from the
  // preceding fields, avoiding false sharing.
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      insert_contended_padding(start);
      _layout->add(cg->big_primitive_fields());
      _layout->add(cg->small_primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  epilogue();
}
1081
/* Computation of inline classes has a slightly different strategy than for
 * regular classes. Regular classes have their oop fields allocated at the end
 * of the layout to increase GC performances. Unfortunately, this strategy
 * increases the number of empty slots inside an instance. Because the purpose
 * of inline classes is to be embedded into other containers, it is critical
 * to keep their size as small as possible. For this reason, the allocation
 * strategy is:
 *   - big primitive fields (primitive types and flat inline type smaller
 *     than an oop) are allocated first (from the biggest to the smallest)
 *   - then oop fields
 *   - then small primitive fields (from the biggest to the smallest)
 */
void FieldLayoutBuilder::compute_inline_class_layout() {

  // Test if the concrete inline class is an empty class (no instance fields)
  // and insert a dummy field if needed
  if (!_is_abstract_value) {
    bool declares_nonstatic_fields = false;
    for (FieldInfo fieldinfo : *_field_info) {
      if (!fieldinfo.access_flags().is_static()) {
        declares_nonstatic_fields = true;
        break;
      }
    }

    if (!declares_nonstatic_fields) {
      bool has_inherited_fields = _super_klass != nullptr && _super_klass->has_nonstatic_fields();
      if (!has_inherited_fields) {
        // Inject ".empty" dummy field (one byte) so that the payload is never
        // zero-sized; the dummy can later double as the null marker.
        _is_empty_inline_class = true;
        FieldInfo::FieldFlags fflags(0);
        fflags.update_injected(true);
        AccessFlags aflags;
        FieldInfo fi(aflags,
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
                     0,
                     fflags);
        int idx = _field_info->append(fi);
        _field_info->adr_at(idx)->set_index(idx);
      }
    }
  }

  prologue();
  inline_class_field_sorting();

  assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");

  if (!_layout->super_has_nonstatic_fields()) {
    // No inherited fields, the layout must be empty except for the RESERVED block
    // PADDING is inserted if needed to ensure the correct alignment of the payload.
    if (_is_abstract_value && _has_nonstatic_fields) {
      // non-static fields of the abstract class must be laid out without knowing
      // the alignment constraints of the fields of the sub-classes, so the worst
      // case scenario is assumed, which is currently the alignment of T_LONG.
      // PADDING is added if needed to ensure the payload will respect this alignment.
      _payload_alignment = type2aelembytes(BasicType::T_LONG);
    }
    assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY, "Unexpected");
    LayoutRawBlock* first_empty = _layout->start()->next_block();
    if (first_empty->offset() % _payload_alignment != 0) {
      LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
      _layout->insert(first_empty, padding);
      if (first_empty->size() == 0) {
        _layout->remove(first_empty);
      }
      _layout->set_start(padding);
    }
  } else { // the class has inherited some fields from its super(s)
    if (!_is_abstract_value) {
      // This is the step where the layout of the final concrete value class' layout
      // is computed. Super abstract value classes might have been too conservative
      // regarding alignment constraints, but now that the full set of non-static fields is
      // known, compute which alignment to use, then set first allowed field offset

      assert(_has_nonstatic_fields, "Concrete value classes must have at least one field");
      if (_payload_alignment == -1) { // current class declares no local nonstatic fields
        _payload_alignment = _layout->super_min_align_required();
      }

      assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
      assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");

      if (_payload_alignment < _layout->super_alignment()) {
        int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
        assert(new_alignment % _payload_alignment == 0, "Must be");
        assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
        _payload_alignment = new_alignment;
      }
      _layout->set_start(_layout->first_field_block());
    }
  }

  _layout->add(_root_group->big_primitive_fields());
  _layout->add(_root_group->oop_fields());
  _layout->add(_root_group->small_primitive_fields());

  LayoutRawBlock* first_field = _layout->first_field_block();
  if (first_field != nullptr) {
    _payload_offset = _layout->first_field_block()->offset();
    _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
  } else {
    // A concrete value class with no fields is impossible (a dummy field was
    // injected above), so only abstract values can reach this branch.
    assert(_is_abstract_value, "Concrete inline types must have at least one field");
    _payload_offset = _layout->blocks()->size();
    _payload_size_in_bytes = 0;
  }

  // Determining if the value class is naturally atomic:
  if ((!_layout->super_has_nonstatic_fields() && _declared_nonstatic_fields_count <= 1 && !_has_non_naturally_atomic_fields)
      || (_layout->super_has_nonstatic_fields() && _super_klass->is_naturally_atomic() && _declared_nonstatic_fields_count == 0)) {
    _is_naturally_atomic = true;
  }

  // At this point, the characteristics of the raw layout (used in standalone instances) are known.
  // From this, additional layouts will be computed: atomic and nullable layouts
  // Once those additional layouts are computed, the raw layout might need some adjustments

  bool vm_uses_flattening = UseFieldFlattening || UseArrayFlattening;

  if (!_is_abstract_value && vm_uses_flattening) { // Flat layouts are only for concrete value classes
    // Validation of the non atomic layout
    if (UseNonAtomicValueFlattening && !AlwaysAtomicAccesses && (!_must_be_atomic || _is_naturally_atomic)) {
      _null_free_non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
      _null_free_non_atomic_layout_alignment = _payload_alignment;
    }

    // Next step is to compute the characteristics for a layout enabling atomic updates
    if (UseAtomicValueFlattening) {
      // Atomic accesses require a power-of-two size no bigger than the
      // platform's maximum atomic operation width.
      int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
      if (atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
        _null_free_atomic_layout_size_in_bytes = atomic_size;
      }
    }

    // Next step is the nullable layouts: they must include a null marker
    if (UseNullableValueFlattening || UseNullableNonAtomicValueFlattening) {
      // Looking if there's an empty slot inside the layout that could be used to store a null marker
      LayoutRawBlock* b = _layout->first_field_block();
      assert(b != nullptr, "A concrete value class must have at least one (possibly dummy) field");
      int null_marker_offset = -1;
      if (_is_empty_inline_class) {
        // Reusing the dummy field as a field marker
        assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
        null_marker_offset = b->offset();
      } else {
        // Scan for the first EMPTY block between the first field and the end of the layout
        while (b != _layout->last_block()) {
          if (b->block_kind() == LayoutRawBlock::EMPTY) {
            break;
          }
          b = b->next_block();
        }
        if (b != _layout->last_block()) {
          // found an empty slot, register its offset from the beginning of the payload
          null_marker_offset = b->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->add_field_at_offset(marker, b->offset());
        }
        if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
          int last_offset = _layout->last_block()->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->insert_field_block(_layout->last_block(), marker);
          assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
          null_marker_offset = marker->offset();
        }
      }
      assert(null_marker_offset != -1, "Sanity check");
      // Now that the null marker is there, the size of the nullable layout must be computed
      int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
      if (UseNullableNonAtomicValueFlattening) {
        _nullable_non_atomic_layout_size_in_bytes = new_raw_size;
        _null_marker_offset = null_marker_offset;
        // NOTE(review): this stores into the null-free non-atomic alignment field
        // from the nullable non-atomic branch — presumably a dedicated nullable
        // alignment field was intended; confirm against fieldLayoutBuilder.hpp.
        _null_free_non_atomic_layout_alignment = _payload_alignment;
      }
      if (UseNullableValueFlattening) {
        // For the nullable atomic layout, the size must be compatible with the platform capabilities
        int nullable_atomic_size = round_up_power_of_2(new_raw_size);
        if (nullable_atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
          _nullable_atomic_layout_size_in_bytes = nullable_atomic_size;
          _null_marker_offset = null_marker_offset;
        }
      }
      if (_null_marker_offset == -1) { // No nullable layout has been accepted
        // If the nullable layout is rejected, the NULL_MARKER block should be removed
        // from the layout, otherwise it will appear anyway if the layout is printed
        if (!_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
        }
      }
    }
    // If the inline class has an atomic or nullable atomic layout,
    // we want the raw layout to have the same alignment as those atomic layouts so access codes
    // could remain simple (single instruction without intermediate copy). This might require
    // to shift all fields in the raw layout, but this operation is possible only if the class
    // doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
    // field shift is needed but not possible, all atomic layouts are disabled and only reference
    // and loosely consistent are supported.
    int required_alignment = _payload_alignment;
    if (has_null_free_atomic_layout() && required_alignment < null_free_atomic_layout_size_in_bytes()) {
      required_alignment = null_free_atomic_layout_size_in_bytes();
    }
    if (has_nullable_atomic_layout() && required_alignment < nullable_atomic_layout_size_in_bytes()) {
      required_alignment = nullable_atomic_layout_size_in_bytes();
    }
    int shift = first_field->offset() % required_alignment;
    if (shift != 0) {
      if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
        // Shifting is possible: move all fields and refresh the cached offsets
        assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possibly dummy) field");
        _layout->shift_fields(shift);
        _payload_offset = _layout->first_field_block()->offset();
        if (has_nullable_atomic_layout() || has_nullable_non_atomic_layout()) {
          assert(!_is_empty_inline_class, "Should not get here with empty values");
          _null_marker_offset = _layout->find_null_marker()->offset();
        }
        _payload_alignment = required_alignment;
      } else {
        // Shifting is not possible: give up all atomic flat layouts
        _null_free_atomic_layout_size_in_bytes = -1;
        if (has_nullable_atomic_layout() && !has_nullable_non_atomic_layout() && !_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
          _null_marker_offset = -1;
        }
        _nullable_atomic_layout_size_in_bytes = -1;
      }
    } else {
      _payload_alignment = required_alignment;
    }

    // If the inline class has a nullable layout, the layout used in heap allocated standalone
    // instances must also be the nullable layout, in order to be able to set the null marker to
    // non-null before copying the payload to other containers.
    if (has_nullable_atomic_layout() && payload_layout_size_in_bytes() < nullable_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_atomic_layout_size_in_bytes();
    }
    if (has_nullable_non_atomic_layout() && payload_layout_size_in_bytes() < nullable_non_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_non_atomic_layout_size_in_bytes();
    }

    // if the inline class has a null-free atomic layout, the layout used in heap allocated standalone
    // instances must have a size at least equal to the atomic layout to allow safe read/write atomic
    // operation
    if (has_null_free_atomic_layout() && payload_layout_size_in_bytes() < null_free_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = null_free_atomic_layout_size_in_bytes();
    }
  }
  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  generate_acmp_maps();
  epilogue();
}
1334
1335 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1336 InlineKlass* vklass, int offset) {
1337 int diff = offset - vklass->payload_offset();
1338 const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1339 const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1340 while (map < last_map) {
1341 nonstatic_oop_maps->add(map->offset() + diff, map->count());
1342 map++;
1343 }
1344 }
1345
1346 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1347 if (list == nullptr) return;
1348 for (int i = 0; i < list->length(); i++) {
1349 LayoutRawBlock* f = list->at(i);
1350 if (f->block_kind() == LayoutRawBlock::FLAT) {
1351 InlineKlass* vk = f->inline_klass();
1352 assert(vk != nullptr, "Should have been initialized");
1353 if (vk->contains_oops()) {
1354 add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1355 }
1356 }
1357 }
1358 }
1359
1360 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1361 if (group->oop_fields() != nullptr) {
1362 for (int i = 0; i < group->oop_fields()->length(); i++) {
1363 LayoutRawBlock* b = group->oop_fields()->at(i);
1364 nonstatic_oop_maps->add(b->offset(), 1);
1365 }
1366 }
1367 register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1368 register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
1369 }
1370
1371 static int insert_segment(GrowableArray<Pair<int,int>>* map, int offset, int size, int last_idx) {
1372 if (map->is_empty()) {
1373 return map->append(Pair<int,int>(offset, size));
1374 }
1375 last_idx = last_idx == -1 ? 0 : last_idx;
1376 int start = map->adr_at(last_idx)->first > offset ? 0 : last_idx;
1377 bool inserted = false;
1378 for (int c = start; c < map->length(); c++) {
1379 if (offset == (map->adr_at(c)->first + map->adr_at(c)->second)) {
1380 //contiguous to the last field, can be coalesced
1381 map->adr_at(c)->second = map->adr_at(c)->second + size;
1382 inserted = true;
1383 break; // break out of the for loop
1384 }
1385 if (offset < (map->adr_at(c)->first)) {
1386 map->insert_before(c, Pair<int,int>(offset, size));
1387 last_idx = c;
1388 inserted = true;
1389 break; // break out of the for loop
1390 }
1391 }
1392 if (!inserted) {
1393 last_idx = map->append(Pair<int,int>(offset, size));
1394 }
1395 return last_idx;
1396 }
1397
// Reads the serialized acmp maps stored in ik's java mirror and merges them
// into the maps under construction, rebasing every recorded offset by
// (offset - payload_offset) so the entries land at the field's position in
// the current layout. Serialized format of the mirror's int array:
//   [0]             : number of non-oop segments n
//   [1 .. 2n]       : n (offset, size) pairs describing non-oop segments
//   [2n+1 .. len-1] : offsets of embedded oops
// Returns the updated last-insertion hint (see insert_segment).
static int insert_map_at_offset(GrowableArray<Pair<int,int>>* nonoop_map, GrowableArray<int>* oop_map,
                                const InstanceKlass* ik, int offset, int payload_offset, int last_idx) {
  oop mirror = ik->java_mirror();
  oop array = mirror->obj_field(ik->acmp_maps_offset());
  assert(array != nullptr, "Sanity check");
  typeArrayOop fmap = (typeArrayOop)array;
  // Handlize the array oop before the loops below
  typeArrayHandle fmap_h(Thread::current(), fmap);
  int nb_nonoop_field = fmap_h->int_at(0);
  // Delta between the source payload base and the destination field offset
  int field_offset = offset - payload_offset;
  for (int i = 0; i < nb_nonoop_field; i++) {
    last_idx = insert_segment(nonoop_map,
                              field_offset + fmap_h->int_at( i * 2 + 1),
                              fmap_h->int_at( i * 2 + 2), last_idx);
  }
  int len = fmap_h->length();
  for (int i = nb_nonoop_field * 2 + 1; i < len; i++) {
    oop_map->append(field_offset + fmap_h->int_at(i));
  }
  return last_idx;
}
1418
1419 static void split_after(GrowableArray<Pair<int,int>>* map, int idx, int head) {
1420 int offset = map->adr_at(idx)->first;
1421 int size = map->adr_at(idx)->second;
1422 if (size <= head) return;
1423 map->adr_at(idx)->first = offset + head;
1424 map->adr_at(idx)->second = size - head;
1425 map->insert_before(idx, Pair<int,int>(offset, head));
1426
1427 }
1428
// Builds the acmp (substitutability test) maps of the current value class:
//   - _nonoop_acmp_map : sorted <offset,size> segments of non-oop payload data
//   - _oop_acmp_map    : offsets of embedded oops
// Inherited fields are merged in from the super class' serialized maps, local
// fields are added from the layout, then the non-oop segments are split on
// 8/4/2-byte boundaries so comparisons can use well-aligned accesses.
void FieldLayoutBuilder::generate_acmp_maps() {
  assert(_is_inline_type || _is_abstract_value, "Must be done only for value classes (abstract or not)");

  // create/initialize current class' maps
  // The Pair<int,int> values in the nonoop_acmp_map represent <offset,size> segments of memory
  _nonoop_acmp_map = new GrowableArray<Pair<int,int>>();
  _oop_acmp_map = new GrowableArray<int>();
  // Empty value classes have nothing to compare
  if (_is_empty_inline_class) return;
  // last_idx remembers the position of the last insertion in order to speed up the next insertion.
  // Local fields are processed in ascending offset order, so an insertion is very likely be performed
  // next to the previous insertion. However, in some cases local fields and inherited fields can be
  // interleaved, in which case the search of the insertion position cannot depend on the previous insertion.
  int last_idx = -1;
  if (_super_klass != nullptr && _super_klass != vmClasses::Object_klass()) { // Assumes j.l.Object cannot have fields
    last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, _super_klass, 0, 0, last_idx);
  }

  // Processing local fields
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    switch(b->block_kind()) {
      case LayoutRawBlock::RESERVED:
      case LayoutRawBlock::EMPTY:
      case LayoutRawBlock::PADDING:
      case LayoutRawBlock::NULL_MARKER:
      case LayoutRawBlock::INHERITED: // inherited fields are handled during maps creation/initialization
        // skip
        break;

      case LayoutRawBlock::REGULAR:
      {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        // Reference-typed field (class or array descriptor) => oop map entry
        if (fi->signature(_constant_pool)->starts_with("L") || fi->signature(_constant_pool)->starts_with("[")) {
          _oop_acmp_map->append(b->offset());
        } else {
          // Non-oop case
          last_idx = insert_segment(_nonoop_acmp_map, b->offset(), b->size(), last_idx);
        }
        break;
      }
      case LayoutRawBlock::FLAT:
      {
        // Merge the embedded value class' own maps, rebased at this field's offset
        InlineKlass* vk = b->inline_klass();
        last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, vk, b->offset(), vk->payload_offset(), last_idx);
        if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
          int null_marker_offset = b->offset() + vk->null_marker_offset_in_payload();
          last_idx = insert_segment(_nonoop_acmp_map, null_marker_offset, 1, last_idx);
          // Important note: the implementation assumes that for nullable flat fields, if the
          // null marker is zero (field is null), then all the fields of the flat field are also
          // zeroed. So, nullable flat field are not encoded different than null-free flat fields,
          // all fields are included in the map, plus the null marker
          // If it happens that the assumption above is wrong, then nullable flat fields would
          // require a dedicated section in the acmp map, and be handled differently: null_marker
          // comparison first, and if null markers are identical and non-zero, then conditional
          // comparison of the other fields
        }
      }
      break;

    }
    b = b->next_block();
  }

  // split segments into well-aligned blocks
  int idx = 0;
  while (idx < _nonoop_acmp_map->length()) {
    int offset = _nonoop_acmp_map->adr_at(idx)->first;
    int size = _nonoop_acmp_map->adr_at(idx)->second;
    int mod = offset % 8;
    // Split off the largest head that restores 8-byte alignment for the tail;
    // split segments are revisited on the next iteration(s) until aligned.
    switch (mod) {
      case 0:
        break;
      case 4:
        split_after(_nonoop_acmp_map, idx, 4);
        break;
      case 2:
      case 6:
        split_after(_nonoop_acmp_map, idx, 2);
        break;
      case 1:
      case 3:
      case 5:
      case 7:
        split_after(_nonoop_acmp_map, idx, 1);
        break;
      default:
        ShouldNotReachHere();
    }
    idx++;
  }
}
1520
// Finalizes the layout computation once all blocks have been placed:
// builds the non-static oop maps (seeding them with the super class's maps),
// derives instance/static sizes, copies every result needed for
// InstanceKlass creation into _info, cross-checks (debug builds only) that
// FieldInfo offsets match the computed LayoutRawBlocks, and optionally
// prints the layout (-XX:+PrintFieldLayout / -XX:+PrintInlineLayout).
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(_nonstatic_oopmap_count);
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  if (super_oop_map_count > 0) {
    // Start from the super class's oop maps; this class's own oop fields are
    // merged in by register_embedded_oops() below.
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
                                                    _super_klass->nonstatic_oop_map_count());
  }
  register_embedded_oops(nonstatic_oop_maps, _root_group);
  if (!_contended_groups.is_empty()) {
    // @Contended fields live in their own groups; their oops must be
    // registered separately from the root group.
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        register_embedded_oops(nonstatic_oop_maps, cg);
      }
    }
  }
  nonstatic_oop_maps->compact();

  // Sizes: instance end is rounded up to a word boundary; static field size
  // is measured in words from the start of the statics area in the mirror.
  // Note that nonstatic_field_end is aligned to heapOopSize (not wordSize)
  // because _nonstatic_field_size is expressed in heap-oop units.
  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
                            InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;
  _info->_has_inlined_fields = _has_inlined_fields;
  _info->_is_naturally_atomic = _is_naturally_atomic;
  if (_is_inline_type) {
    // Results that only make sense for concrete value classes: payload
    // geometry plus the sizes/alignments of each alternative flat layout.
    _info->_must_be_atomic = _must_be_atomic;
    _info->_payload_alignment = _payload_alignment;
    _info->_payload_offset = _payload_offset;
    _info->_payload_size_in_bytes = _payload_size_in_bytes;
    _info->_null_free_non_atomic_size_in_bytes = _null_free_non_atomic_layout_size_in_bytes;
    _info->_null_free_non_atomic_alignment = _null_free_non_atomic_layout_alignment;
    _info->_null_free_atomic_layout_size_in_bytes = _null_free_atomic_layout_size_in_bytes;
    _info->_nullable_atomic_layout_size_in_bytes = _nullable_atomic_layout_size_in_bytes;
    _info->_nullable_non_atomic_layout_size_in_bytes = _nullable_non_atomic_layout_size_in_bytes;
    _info->_null_marker_offset = _null_marker_offset;
    _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
    _info->_is_empty_inline_klass = _is_empty_inline_class;
  }

  // Acmp maps are needed for both concrete and abstract value classes
  if (_is_inline_type || _is_abstract_value) {
    _info->_acmp_maps_offset = _static_layout->acmp_maps_offset();
    _info->_nonoop_acmp_map = _nonoop_acmp_map;
    _info->_oop_acmp_map = _oop_acmp_map;
  }

  // This may be too restrictive, since if all the fields fit in 64
  // bits we could make the decision to align instances of this class
  // to 64-bit boundaries, and load and store them as single words.
  // And on machines which supported larger atomics we could similarly
  // allow larger values to be atomic, if properly aligned.

#ifdef ASSERT
  // Tests verifying integrity of field layouts are using the output of -XX:+PrintFieldLayout
  // which prints the details of LayoutRawBlocks used to compute the layout.
  // The code below checks that offsets in the _field_info meta-data match offsets
  // in the LayoutRawBlocks
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
        tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
      }
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
  b = _static_layout->blocks();
  while(b != _static_layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
#endif // ASSERT

  // Ensures the "log format" banner is printed only once per VM run.
  // NOTE(review): function-local static mutated without synchronization;
  // presumably layout printing during class loading is effectively
  // serialized (worst case the banner prints more than once) — confirm.
  static bool first_layout_print = true;


  if (PrintFieldLayout || (PrintInlineLayout && (_has_inlineable_fields || _is_inline_type || _is_abstract_value))) {
    ResourceMark rm;
    stringStream st;
    if (first_layout_print) {
      st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
      st.print_cr("Heap oop size = %d", heapOopSize);
      first_layout_print = false;
    }
    if (_super_klass != nullptr) {
      st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
                  _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
    } else {
      st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
    }
    st.print_cr("Instance fields:");
    const bool dummy_field_is_reused_as_null_marker = _is_empty_inline_class && _null_marker_offset != -1;
    _layout->print(&st, false, _super_klass, _inline_layout_info_array, dummy_field_is_reused_as_null_marker);
    st.print_cr("Static fields:");
    _static_layout->print(&st, true, nullptr, _inline_layout_info_array, false);
    st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    if (_is_inline_type) {
      // Each supported flat layout is printed as "size/alignment", or "-/-"
      // when this class does not support that layout kind.
      st.print_cr("First field offset = %d", _payload_offset);
      st.print_cr("%s layout: %d/%d", LayoutKindHelper::layout_kind_as_string(LayoutKind::BUFFERED),
                  _payload_size_in_bytes, _payload_alignment);
      if (has_null_free_non_atomic_flat_layout()) {
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT),
                    _null_free_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT));
      }
      if (has_null_free_atomic_layout()) {
        // NOTE(review): the size is printed in both the size and alignment
        // slots (compare the non-atomic branch above, which prints a
        // dedicated alignment). Presumably atomic flat layouts are aligned
        // to their size — confirm, otherwise this should print an alignment.
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT),
                    _null_free_atomic_layout_size_in_bytes, _null_free_atomic_layout_size_in_bytes);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT));
      }
      if (has_nullable_atomic_layout()) {
        // NOTE(review): same size-printed-twice pattern as the null-free
        // atomic case above — confirm it is intentional.
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT),
                    _nullable_atomic_layout_size_in_bytes, _nullable_atomic_layout_size_in_bytes);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT));
      }
      if (has_nullable_non_atomic_layout()) {
        // NOTE(review): prints the NULL-FREE non-atomic alignment for the
        // NULLABLE non-atomic layout; presumably the two layouts share the
        // same alignment — confirm against the layout computation.
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT),
                    _nullable_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT));
      }
      if (_null_marker_offset != -1) {
        st.print_cr("Null marker offset = %d", _null_marker_offset);
      }
      // Acmp maps: non-oop segments are <offset,size> pairs, oop entries are
      // bare offsets.
      st.print("Non-oop acmp map: ");
      for (int i = 0 ; i < _nonoop_acmp_map->length(); i++) {
        st.print("<%d,%d>, ", _nonoop_acmp_map->at(i).first, _nonoop_acmp_map->at(i).second);
      }
      st.print_cr("");
      st.print("oop acmp map: ");
      for (int i = 0 ; i < _oop_acmp_map->length(); i++) {
        st.print("%d, ", _oop_acmp_map->at(i));
      }
      st.print_cr("");
    }
    st.print_cr("---");
    // Print output all together.
    tty->print_raw(st.as_string());
  }
}
1687
1688 void FieldLayoutBuilder::build_layout() {
1689 if (_is_inline_type || _is_abstract_value) {
1690 compute_inline_class_layout();
1691 } else {
1692 compute_regular_layout();
1693 }
1694 }