7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classFileParser.hpp"
26 #include "classfile/fieldLayoutBuilder.hpp"
27 #include "jvm.h"
28 #include "memory/resourceArea.hpp"
29 #include "oops/array.hpp"
30 #include "oops/fieldStreams.inline.hpp"
31 #include "oops/instanceKlass.inline.hpp"
32 #include "oops/instanceMirrorKlass.hpp"
33 #include "oops/klass.inline.hpp"
34 #include "runtime/fieldDescriptor.inline.hpp"
35
36
// Constructor for blocks that are not backed by a declared field:
// EMPTY, RESERVED, PADDING and INHERITED blocks carry no field index
// and no reference flag.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _kind(kind),
  _offset(-1),          // offset is assigned later, when the block is placed
  _alignment(1),        // no alignment constraint for field-less blocks
  _size(size),
  _field_index(-1),     // -1 marks "no associated field"
  _is_reference(false) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
50
51
// Constructor for blocks backed by a declared field (REGULAR, FLATTENED,
// INHERITED). The index identifies the field in the FieldInfo table;
// is_reference marks oop fields for later oop map construction.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _kind(kind),
  _offset(-1),          // offset is assigned later, when the block is placed
  _alignment(alignment),
  _size(size),
  _field_index(index),
  _is_reference(is_reference) {
  assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
66
67 bool LayoutRawBlock::fit(int size, int alignment) {
68 int adjustment = 0;
69 if ((_offset % alignment) != 0) {
70 adjustment = alignment - (_offset % alignment);
71 }
72 return _size >= size + adjustment;
73 }
74
// A FieldGroup gathers fields that must be allocated together: the default
// (root) group, the static group, or one @Contended group.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _primitive_fields(nullptr),   // lazily allocated on first primitive field
  _oop_fields(nullptr),         // lazily allocated on first oop field
  _contended_group(contended_group), // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
81
82 void FieldGroup::add_primitive_field(int idx, BasicType type) {
83 int size = type2aelembytes(type);
84 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
85 if (_primitive_fields == nullptr) {
86 _primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
87 }
88 _primitive_fields->append(block);
89 }
90
91 void FieldGroup::add_oop_field(int idx) {
92 int size = type2aelembytes(T_OBJECT);
93 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
94 if (_oop_fields == nullptr) {
95 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
96 }
97 _oop_fields->append(block);
98 _oop_count++;
99 }
100
101 void FieldGroup::sort_by_size() {
102 if (_primitive_fields != nullptr) {
103 _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
104 }
105 }
106
// A FieldLayout is a doubly-linked list of LayoutRawBlocks describing the
// layout under construction. _blocks is the list head, _start the first
// block from which new fields may be allocated, _last the trailing block.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, ConstantPool* cp) :
  _field_info(field_info),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),   // nullptr until one of the initialize_*_layout() methods runs
  _last(_blocks) {}
113
// Creates the initial layout for static fields: a single infinite EMPTY
// block, preceded by a RESERVED block for the java.lang.Class instance
// header when its size is already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    // insert() may have changed the list head; keep the head anchored at offset 0
    _blocks->set_offset(0);
  }
}
128
// Creates the initial layout for instance fields. With no super class, the
// layout is the object header (RESERVED) followed by an infinite EMPTY
// block. Otherwise the super class layout is reconstructed first, and new
// fields are either allowed into its holes or appended after it, depending
// on the super class's @Contended status.
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
  if (super_klass == nullptr) {
    super_ends_with_oop = false;
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    bool super_has_instance_fields = false;
    reconstruct_layout(super_klass, super_has_instance_fields, super_ends_with_oop);
    fill_holes(super_klass);
    // A contended super class that actually has fields forbids reusing its gaps
    if (!super_klass->has_contended_annotations() || !super_has_instance_fields) {
      _start = _blocks; // start allocating fields from the first empty block
    } else {
      _start = _last; // append fields at the end of the reconstructed layout
    }
  }
}
148
149 LayoutRawBlock* FieldLayout::first_field_block() {
150 LayoutRawBlock* block = _start;
151 while (block->kind() != LayoutRawBlock::INHERITED && block->kind() != LayoutRawBlock::REGULAR
152 && block->kind() != LayoutRawBlock::FLATTENED && block->kind() != LayoutRawBlock::PADDING) {
153 block = block->next_block();
154 }
155 return block;
156 }
157
158
159 // Insert a set of fields into a layout using a best-fit strategy.
160 // For each field, search for the smallest empty slot able to fit the field
161 // (satisfying both size and alignment requirements), if none is found,
162 // add the field at the end of the layout.
163 // Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Cache of the previous field's search parameters and outcome, used to
  // skip a search that is guaranteed to fail (see comment below).
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;

    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;
      // Walk backward from the end toward 'start', remembering the smallest
      // EMPTY block able to hold the field (best fit).
      while (cursor != start) {
        if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No hole fits: fall back to appending in the trailing EMPTY block
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }

    insert_field_block(candidate, b);
  }
}
212
// Used for classes with hard coded field offsets: insert a field at the specified offset.
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    // Find the block whose span contains the requested offset, or fall
    // through to the trailing block.
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      // NOTE(review): this compares slot->size() (a length) against
      // block->offset() + block->size() (an absolute end offset) — verify
      // the intended invariant; it holds trivially only for the INT_MAX
      // trailing block.
      assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Carve a leading EMPTY block so the field starts exactly at 'offset'
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        // The slot was fully consumed; drop it from the list
        remove(slot);
      }
      // Publish the final offset back into the FieldInfo table
      _field_info->adr_at(block->field_index())->set_offset(block->offset());
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
242
243 // The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
245 // to fit alignment constraints).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = nullptr;
  if (start == last_block()) {
    candidate = last_block();
  } else {
    LayoutRawBlock* first = list->at(0);
    candidate = last_block()->prev_block();
    // Walk backward until an EMPTY block fitting the whole set is found,
    // falling back to appending in the trailing block if the walk reaches
    // 'start' without success.
    while (candidate->kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != nullptr, "Candidate must not be null");
    assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  // Insert all blocks back-to-back into the chosen slot
  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
282
// Inserts a field block at the start of an EMPTY slot, adding an alignment
// padding block in front when required, and records the field's final
// offset in the FieldInfo table. Returns the inserted block.
LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  if (slot->offset() % block->alignment() != 0) {
    // Pad the front of the slot so the field starts correctly aligned
    int adjustment = block->alignment() - (slot->offset() % block->alignment());
    LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
    insert(slot, adj);
  }
  insert(slot, block);
  if (slot->size() == 0) {
    // The slot was fully consumed; drop it from the list
    remove(slot);
  }
  _field_info->adr_at(block->field_index())->set_offset(block->offset());
  return block;
}
297
298 void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_instance_fields, bool& ends_with_oop) {
299 has_instance_fields = ends_with_oop = false;
300 GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
301 BasicType last_type;
302 int last_offset = -1;
303 while (ik != nullptr) {
304 for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
305 BasicType type = Signature::basic_type(fs.signature());
306 // distinction between static and non-static fields is missing
307 if (fs.access_flags().is_static()) continue;
308 has_instance_fields = true;
309 if (fs.offset() > last_offset) {
310 last_offset = fs.offset();
311 last_type = type;
312 }
313 int size = type2aelembytes(type);
314 // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
315 LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
316 block->set_offset(fs.offset());
317 all_fields->append(block);
318 }
319 ik = ik->super() == nullptr ? nullptr : ik->super();
320 }
321 assert(last_offset == -1 || last_offset > 0, "Sanity");
322 if (last_offset > 0 &&
323 (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
324 ends_with_oop = true;
325 }
326
327 all_fields->sort(LayoutRawBlock::compare_offset);
328 _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
329 _blocks->set_offset(0);
330 _last = _blocks;
331
332 for(int i = 0; i < all_fields->length(); i++) {
333 LayoutRawBlock* b = all_fields->at(i);
334 _last->set_next_block(b);
335 b->set_prev_block(_last);
336 _last = b;
337 }
338 _start = _blocks;
339 }
340
341 // Called during the reconstruction of a layout, after fields from super
342 // classes have been inserted. It fills unused slots between inserted fields
343 // with EMPTY blocks, so the regular field insertion methods would work.
344 // This method handles classes with @Contended annotations differently
345 // by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
346 // fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  // Holes in a contended super class are padded instead of being reusable
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      // Gap between two consecutive blocks: fill it with an EMPTY/PADDING block
      int size = b->next_block()->offset() - (b->offset() + b->size());
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");

  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Terminate the layout with an "infinite" EMPTY block so new fields can
  // always be appended
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
385
// Inserts 'block' at the start of the EMPTY 'slot', shrinking the slot by
// the block's size. The slot's offset must already satisfy the block's
// alignment (callers pad via insert_field_block()). Returns the inserted
// block.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Link the block just before the slot in the doubly-linked list
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  // Keep the list head up to date when inserting before the first block
  if (_blocks == slot) {
    _blocks = block;
  }
  return block;
}
405
// Unlinks 'block' from the layout list. The trailing block (_last) can
// never be removed, so 'block' always has a successor.
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    // Removing the list head: advance _blocks
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    // Keep _start valid: fall back to the predecessor of the removed block
    _start = block->prev_block();
  }
}
423
// Prints a human-readable description of the layout, one line per block,
// in the form "@offset "name" signature size/alignment KIND".
// For INHERITED blocks, the super class hierarchy is searched to recover
// the field's name and signature from its offset.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "REGULAR");
        break;
      }
      case LayoutRawBlock::FLATTENED: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "FLATTENED");
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %d/- %s",
                         b->offset(),
                         b->size(),
                         "RESERVED");
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Search the hierarchy for the field declared at this offset
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset()) {
              output->print_cr(" @%d \"%s\" %s %d/%d %s",
                               b->offset(),
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string(),
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla
                               "INHERITED");
              found = true;
              break;
            }
          }
          ik = ik->super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "EMPTY");
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "PADDING");
        break;
    }
    b = b->next_block();
  }
}
497
// Gathers the inputs needed to compute the layout of one class. The layout
// itself is computed later by build_layout(); results are written into
// 'info'.
// NOTE(review): _super_ends_with_oop is not initialized here; it is
// assigned in prologue() — confirm against the header that it is never
// read before that.
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, FieldLayoutInfo* info) :
  _classname(classname),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _alignment(-1),
  _has_nonstatic_fields(false),
  _is_contended(is_contended) {}
514
515
516 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
517 assert(g > 0, "must only be called for named contended groups");
518 FieldGroup* fg = nullptr;
519 for (int i = 0; i < _contended_groups.length(); i++) {
520 fg = _contended_groups.at(i);
521 if (fg->contended_group() == g) return fg;
522 }
523 fg = new FieldGroup(g);
524 _contended_groups.append(fg);
525 return fg;
526 }
527
528 void FieldLayoutBuilder::prologue() {
529 _layout = new FieldLayout(_field_info, _constant_pool);
530 const InstanceKlass* super_klass = _super_klass;
531 _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
532 if (super_klass != nullptr) {
533 _has_nonstatic_fields = super_klass->has_nonstatic_fields();
534 }
535 _static_layout = new FieldLayout(_field_info, _constant_pool);
536 _static_layout->initialize_static_layout();
537 _static_fields = new FieldGroup();
538 _root_group = new FieldGroup();
539 }
540
541 // Field sorting for regular classes:
542 // - fields are sorted in static and non-static fields
543 // - non-static fields are also sorted according to their contention group
544 // (support of the @Contended annotation)
545 // - @Contended annotation is ignored for static fields
546 void FieldLayoutBuilder::regular_field_sorting() {
547 int idx = 0;
548 for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
549 FieldInfo ctrl = _field_info->at(0);
550 FieldGroup* group = nullptr;
551 FieldInfo fieldinfo = *it;
552 if (fieldinfo.access_flags().is_static()) {
553 group = _static_fields;
554 } else {
555 _has_nonstatic_fields = true;
556 if (fieldinfo.field_flags().is_contended()) {
557 int g = fieldinfo.contended_group();
558 if (g == 0) {
559 group = new FieldGroup(true);
560 _contended_groups.append(group);
561 } else {
562 group = get_or_create_contended_group(g);
563 }
564 } else {
565 group = _root_group;
566 }
567 }
568 assert(group != nullptr, "invariant");
569 BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
570 switch(type) {
571 case T_BYTE:
572 case T_CHAR:
573 case T_DOUBLE:
574 case T_FLOAT:
575 case T_INT:
576 case T_LONG:
577 case T_SHORT:
578 case T_BOOLEAN:
579 group->add_primitive_field(idx, type);
580 break;
581 case T_OBJECT:
582 case T_ARRAY:
583 if (group != _static_fields) _nonstatic_oopmap_count++;
584 group->add_oop_field(idx);
585 break;
586 default:
587 fatal("Something wrong?");
588 }
589 }
590 _root_group->sort_by_size();
591 _static_fields->sort_by_size();
592 if (!_contended_groups.is_empty()) {
593 for (int i = 0; i < _contended_groups.length(); i++) {
594 _contended_groups.at(i)->sort_by_size();
595 }
596 }
597 }
598
599 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
600 if (ContendedPaddingWidth > 0) {
601 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
602 _layout->insert(slot, padding);
603 }
604 }
605
606 // Computation of regular classes layout is an evolution of the previous default layout
607 // (FieldAllocationStyle 1):
608 // - primitive fields are allocated first (from the biggest to the smallest)
609 // - oop fields are allocated, either in existing gaps or at the end of
610 // the layout. We allocate oops in a single block to have a single oop map entry.
611 // - if the super class ended with an oop, we lead with oops. That will cause the
612 // trailing oop map entry of the super class and the oop map entry of this class
613 // to be folded into a single entry later. Correspondingly, if the super class
614 // ends with a primitive field, we gain nothing by leading with oops; therefore
615 // we let oop fields trail, thus giving future derived classes the chance to apply
616 // the same trick.
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();

  if (_is_contended) {
    // The whole class is @Contended: pad before any of its fields
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }

  // Insertion order controls oop map folding, see the comment above this method
  if (_super_ends_with_oop) {
    _layout->add(_root_group->oop_fields());
    _layout->add(_root_group->primitive_fields());
  } else {
    _layout->add(_root_group->primitive_fields());
    _layout->add(_root_group->oop_fields());
  }

  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      // Each contended group is preceded by its own padding block
      insert_contended_padding(start);
      _layout->add(cg->primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Static oops are allocated contiguously (single oop map entry on the mirror)
  _static_layout->add_contiguously(this->_static_fields->oop_fields());
  _static_layout->add(this->_static_fields->primitive_fields());

  epilogue();
}
658
// Finalizes the layout computation: builds the non-static oop maps
// (inherited entries first, then this class's), computes instance and
// static field sizes, and writes the results into _info.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;

  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(max_oop_map_count);
  if (super_oop_map_count > 0) {
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
                                                    _super_klass->nonstatic_oop_map_count());
  }

  if (_root_group->oop_fields() != nullptr) {
    for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
      LayoutRawBlock* b = _root_group->oop_fields()->at(i);
      nonstatic_oop_maps->add(b->offset(), 1);
    }
  }

  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        // A contended group's oops are contiguous: one map entry covers them all
        nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count());
      }
    }
  }

  nonstatic_oop_maps->compact();

  // End offsets are taken from the trailing (infinite EMPTY) block of each layout
  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
      InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;

  if (PrintFieldLayout) {
    ResourceMark rm;
    tty->print_cr("Layout of class %s", _classname->as_C_string());
    tty->print_cr("Instance fields:");
    _layout->print(tty, false, _super_klass);
    tty->print_cr("Static fields:");
    _static_layout->print(tty, true, nullptr);
    tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    tty->print_cr("---");
  }
}
715
// Entry point: computes the instance and static field layouts and fills
// in the FieldLayoutInfo handed to the constructor.
void FieldLayoutBuilder::build_layout() {
  compute_regular_layout();
}
|
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classFileParser.hpp"
26 #include "classfile/fieldLayoutBuilder.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "jvm.h"
30 #include "memory/resourceArea.hpp"
31 #include "oops/array.hpp"
32 #include "oops/fieldStreams.inline.hpp"
33 #include "oops/inlineKlass.inline.hpp"
34 #include "oops/instanceKlass.inline.hpp"
35 #include "oops/instanceMirrorKlass.hpp"
36 #include "oops/klass.inline.hpp"
37 #include "runtime/fieldDescriptor.inline.hpp"
38 #include "utilities/powerOfTwo.hpp"
39
// Selects the layout kind (flat variant or plain reference) for the given
// field, based on VM flags, field modifiers, and the layouts supported by
// the field's value class. Returns LayoutKind::REFERENCE whenever
// flattening is disabled or impossible.
static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
                                         bool use_atomic_flat) {

  if (!UseFieldFlattening) {
    return LayoutKind::REFERENCE;
  }

  if (field_info.field_flags().is_injected()) {
    // don't flatten injected fields
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_volatile()) {
    // volatile is used as a keyword to prevent flattening
    return LayoutKind::REFERENCE;
  }

  if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
    // field's type is not a known value class, using a reference
    return LayoutKind::REFERENCE;
  }

  InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
  InlineKlass* vk = inline_field_info->klass();

  if (field_info.field_flags().is_null_free_inline_type()) {
    assert(field_info.access_flags().is_strict(), "null-free fields must be strict");
    if (vk->must_be_atomic() || AlwaysAtomicAccesses) {
      // NOTE(review): on this path use_atomic_flat is not consulted for the
      // non-atomic layout of a naturally atomic value — presumably because
      // such a layout is already safe; confirm against callers.
      if (vk->is_naturally_atomic() && vk->has_non_atomic_layout()) return LayoutKind::NON_ATOMIC_FLAT;
      return (vk->has_atomic_layout() && use_atomic_flat) ? LayoutKind::ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return vk->has_non_atomic_layout() ? LayoutKind::NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
    }
  } else {
    // Nullable field: flattening requires a nullable atomic layout
    if (UseNullableValueFlattening && vk->has_nullable_atomic_layout()) {
      return use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return LayoutKind::REFERENCE;
    }
  }
}
81
82 static void get_size_and_alignment(InlineKlass* vk, LayoutKind kind, int* size, int* alignment) {
83 switch(kind) {
84 case LayoutKind::NON_ATOMIC_FLAT:
85 *size = vk->non_atomic_size_in_bytes();
86 *alignment = vk->non_atomic_alignment();
87 break;
88 case LayoutKind::ATOMIC_FLAT:
89 *size = vk->atomic_size_in_bytes();
90 *alignment = *size;
91 break;
92 case LayoutKind::NULLABLE_ATOMIC_FLAT:
93 *size = vk->nullable_atomic_size_in_bytes();
94 *alignment = *size;
95 break;
96 default:
97 ShouldNotReachHere();
98 }
99 }
100
// Constructor for blocks not backed by a declared field: EMPTY, RESERVED,
// PADDING, INHERITED and NULL_MARKER blocks carry no field index.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN),
  _offset(-1),          // offset is assigned later, when the block is placed
  _alignment(1),        // no alignment constraint for field-less blocks
  _size(size),
  _field_index(-1) {    // -1 marks "no associated field"
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
115
116
// Constructor for blocks backed by a declared field (REGULAR, FLAT,
// INHERITED). The index identifies the field in the FieldInfo table. For
// FLAT blocks, the inline klass and layout kind are set separately by the
// caller.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN),
  _offset(-1),          // offset is assigned later, when the block is placed
  _alignment(alignment),
  _size(size),
  _field_index(index) {
  assert(kind == REGULAR || kind == FLAT || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
132
133 bool LayoutRawBlock::fit(int size, int alignment) {
134 int adjustment = 0;
135 if ((_offset % alignment) != 0) {
136 adjustment = alignment - (_offset % alignment);
137 }
138 return _size >= size + adjustment;
139 }
140
// A FieldGroup gathers fields that must be allocated together: the default
// (root) group, the static group, or one @Contended group. Primitive fields
// are kept in two lists, split on oopSize (see add_primitive_field()).
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _small_primitive_fields(nullptr),   // fields smaller than oopSize, lazily allocated
  _big_primitive_fields(nullptr),     // fields of at least oopSize, lazily allocated
  _oop_fields(nullptr),               // lazily allocated on first oop field
  _contended_group(contended_group), // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
148
149 void FieldGroup::add_primitive_field(int idx, BasicType type) {
150 int size = type2aelembytes(type);
151 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
152 if (size >= oopSize) {
153 add_to_big_primitive_list(block);
154 } else {
155 add_to_small_primitive_list(block);
156 }
157 }
158
159 void FieldGroup::add_oop_field(int idx) {
160 int size = type2aelembytes(T_OBJECT);
161 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
162 if (_oop_fields == nullptr) {
163 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
164 }
165 _oop_fields->append(block);
166 _oop_count++;
167 }
168
169 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk, int size, int alignment) {
170 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
171 block->set_inline_klass(vk);
172 block->set_layout_kind(lk);
173 if (block->size() >= oopSize) {
174 add_to_big_primitive_list(block);
175 } else {
176 add_to_small_primitive_list(block);
177 }
178 }
179
180 void FieldGroup::sort_by_size() {
181 if (_small_primitive_fields != nullptr) {
182 _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
183 }
184 if (_big_primitive_fields != nullptr) {
185 _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
186 }
187 }
188
189 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
190 if (_small_primitive_fields == nullptr) {
191 _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
192 }
193 _small_primitive_fields->append(block);
194 }
195
196 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
197 if (_big_primitive_fields == nullptr) {
198 _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
199 }
200 _big_primitive_fields->append(block);
201 }
202
// A FieldLayout is the doubly-linked list of LayoutRawBlocks modeling the
// layout under construction. All special offsets start unset (-1) and are
// filled in as blocks are inserted; _start/_last alias the (initially null)
// head of the list until one of the initialize_* methods builds it.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
  _field_info(field_info),
  _inline_layout_info_array(inline_layout_info_array),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),
  _last(_blocks),
  _super_first_field_offset(-1),
  _super_alignment(-1),
  _super_min_align_required(-1),
  _null_reset_value_offset(-1),
  _acmp_maps_offset(-1),
  _super_has_fields(false),
  _has_inherited_fields(false) {}
217
// Sets up the layout used for static fields: a single unbounded EMPTY block,
// preceded by a RESERVED block covering the java.lang.Class instance fields
// when their size is already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    // insert() made the RESERVED block the new head; re-anchor it at offset 0
    _blocks->set_offset(0);
  }
}
232
// Sets up the layout used for instance fields. Without a super class, the
// layout is an unbounded EMPTY block with a RESERVED block covering the
// object header. With a super class, the super's layout is reconstructed and
// its holes are filled; super_ends_with_oop (out) reports whether the field
// at the highest offset in the super layout is an oop.
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
  if (super_klass == nullptr) {
    super_ends_with_oop = false;
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    reconstruct_layout(super_klass, _super_has_fields, super_ends_with_oop);
    fill_holes(super_klass);
    // A @Contended super class with fields padded itself; do not allocate
    // subclass fields inside its padding — append at the end instead.
    if ((!super_klass->has_contended_annotations()) || !_super_has_fields) {
      _start = _blocks; // start allocating fields from the first empty block
    } else {
      _start = _last; // append fields at the end of the reconstructed layout
    }
  }
}
251
252 LayoutRawBlock* FieldLayout::first_field_block() {
253 LayoutRawBlock* block = _blocks;
254 while (block != nullptr
255 && block->block_kind() != LayoutRawBlock::INHERITED
256 && block->block_kind() != LayoutRawBlock::REGULAR
257 && block->block_kind() != LayoutRawBlock::FLAT
258 && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
259 block = block->next_block();
260 }
261 return block;
262 }
263
264 // Insert a set of fields into a layout.
265 // For each field, search for an empty slot able to fit the field
266 // (satisfying both size and alignment requirements), if none is found,
267 // add the field at the end of the layout.
268 // Fields cannot be inserted before the block specified in the "start" argument
// Inserts each block of 'list' into the layout using a best-fit strategy:
// among the EMPTY blocks between 'start' and the end of the layout that can
// hold the field, the smallest one is chosen; if none fits, the field is
// appended at the end (the trailing unbounded EMPTY block).
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Memoization of the previous field's search, to skip provably-failing scans
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      // Scan backwards from the block before the trailing EMPTY block
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;

      while (cursor != start) {
        if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          // best fit: prefer the smallest empty block that can hold the field
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
316
// Used for classes with hard-coded field offsets: insert a field at the specified offset
// Places 'block' at the exact requested offset. The layout is scanned from
// 'start' (or _start) for the slot covering that offset; the slot must be an
// EMPTY block big enough to hold the field, and is split as needed (an EMPTY
// block covers the gap before the field, and the remainder stays in the
// original slot, which is removed if it shrinks to zero). The field's offset
// is then published to the FieldInfo array. A layout with no matching slot is
// a fatal error.
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    // Matching slot: either covers the target offset, or is the trailing block
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Fill the gap between the slot's start and the requested offset
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        remove(slot);
      }
      if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
        _field_info->adr_at(block->field_index())->set_offset(block->offset());
      }
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
348
349 // The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
351 // to fit alignment constraints).
// Allocates all blocks of 'list' contiguously, in the first EMPTY block (from
// the end of the layout backwards to 'start') big enough for their combined
// size with the first block's alignment; falls back to appending at the end.
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = nullptr;
  if (start == last_block()) {
    candidate = last_block();
  } else {
    LayoutRawBlock* first = list->at(0);
    candidate = last_block()->prev_block();
    // First-fit scan backwards; give up and append at the end when 'start' is reached
    while (candidate->block_kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != nullptr, "Candidate must not be null");
    assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
388
389 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
390 assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
391 if (slot->offset() % block->alignment() != 0) {
392 int adjustment = block->alignment() - (slot->offset() % block->alignment());
393 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
394 insert(slot, adj);
395 }
396 assert(block->size() >= block->size(), "Enough space must remain after adjustment");
397 insert(slot, block);
398 if (slot->size() == 0) {
399 remove(slot);
400 }
401 // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
402 if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
403 _field_info->adr_at(block->field_index())->set_offset(block->offset());
404 if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
405 _null_reset_value_offset = block->offset();
406 }
407 if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::acmp_maps_name()) {
408 _acmp_maps_offset = block->offset();
409 }
410 }
411 if (block->block_kind() == LayoutRawBlock::FLAT && block->layout_kind() == LayoutKind::NULLABLE_ATOMIC_FLAT) {
412 int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->payload_offset() + block->offset();
413 _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
414 _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
415 }
416
417 return block;
418 }
419
420 void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_instance_fields, bool& ends_with_oop) {
421 has_instance_fields = ends_with_oop = false;
422 if (ik->is_abstract() && !ik->is_identity_class()) {
423 _super_alignment = type2aelembytes(BasicType::T_LONG);
424 }
425 GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
426 BasicType last_type;
427 int last_offset = -1;
428 while (ik != nullptr) {
429 for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
430 BasicType type = Signature::basic_type(fs.signature());
431 // distinction between static and non-static fields is missing
432 if (fs.access_flags().is_static()) continue;
433 has_instance_fields = true;
434 _has_inherited_fields = true;
435 if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) {
436 _super_first_field_offset = fs.offset();
437 }
438 LayoutRawBlock* block;
439 if (fs.is_flat()) {
440 InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
441 InlineKlass* vk = layout_info.klass();
442 block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
443 vk->layout_size_in_bytes(layout_info.kind()),
444 vk->layout_alignment(layout_info.kind()));
445 assert(_super_alignment == -1 || _super_alignment >= vk->payload_alignment(), "Invalid value alignment");
446 _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
447 } else {
448 int size = type2aelembytes(type);
449 // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
450 block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
451 // For primitive types, the alignment is equal to the size
452 assert(_super_alignment == -1 || _super_alignment >= size, "Invalid value alignment");
453 _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
454 }
455 if (fs.offset() > last_offset) {
456 last_offset = fs.offset();
457 last_type = type;
458 }
459 block->set_offset(fs.offset());
460 all_fields->append(block);
461 }
462 ik = ik->super() == nullptr ? nullptr : ik->super();
463 }
464 assert(last_offset == -1 || last_offset > 0, "Sanity");
465 if (last_offset > 0 &&
466 (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
467 ends_with_oop = true;
468 }
469
470 all_fields->sort(LayoutRawBlock::compare_offset);
471 _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
472 _blocks->set_offset(0);
473 _last = _blocks;
474 for(int i = 0; i < all_fields->length(); i++) {
475 LayoutRawBlock* b = all_fields->at(i);
476 _last->set_next_block(b);
477 b->set_prev_block(_last);
478 _last = b;
479 }
480 _start = _blocks;
481 }
482
483 // Called during the reconstruction of a layout, after fields from super
484 // classes have been inserted. It fills unused slots between inserted fields
485 // with EMPTY blocks, so the regular field insertion methods would work.
486 // This method handles classes with @Contended annotations differently
487 // by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
488 // fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  // @Contended super classes get PADDING fillers so subclass fields cannot
  // reuse their gaps; otherwise gaps become EMPTY and are allocatable.
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      // Gap between this block and the next one: insert a filler block
      int size = b->next_block()->offset() - (b->offset() + b->size());
      // FIXME it would be better if initial empty block were tagged as PADDING for value classes
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Terminate the layout with the unbounded trailing EMPTY block
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
527
// Inserts 'block' at the start of the EMPTY 'slot': the block takes over the
// slot's offset and the slot shrinks by the block's size (possibly to zero —
// callers remove zero-sized slots). Head pointers (_blocks, _start) are
// redirected when the slot was the head. Returns the inserted block.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Link the block just before the slot
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  if (_blocks == slot) {
    _blocks = block;
  }
  if (_start == slot) {
    _start = block;
  }
  return block;
}
550
// Unlinks 'block' from the layout list. The block object itself is not
// modified or freed, so callers may still read its offset/size afterwards.
// Must never be called on _last (the trailing unbounded EMPTY block).
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    // NOTE(review): if the removed block was also the head, its prev_block()
    // is nullptr, leaving _start null — presumably callers reset _start
    // before the next allocation; confirm against call sites.
    _start = block->prev_block();
  }
}
568
// Shifts every block from the first field block onwards up by 'shift' bytes.
// The space freed before the first field is absorbed by the preceding EMPTY
// block when there is one, otherwise covered by a new PADDING block. The new
// offsets (including null marker offsets of nullable flat fields) are
// re-published to the field metadata.
void FieldLayout::shift_fields(int shift) {
  LayoutRawBlock* b = first_field_block();
  LayoutRawBlock* previous = b->prev_block();
  if (previous->block_kind() == LayoutRawBlock::EMPTY) {
    previous->set_size(previous->size() + shift);
  } else {
    LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
    nb->set_offset(b->offset());
    previous->set_next_block(nb);
    nb->set_prev_block(previous);
    b->set_prev_block(nb);
    nb->set_next_block(b);
  }
  while (b != nullptr) {
    b->set_offset(b->offset() + shift);
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      _field_info->adr_at(b->field_index())->set_offset(b->offset());
      if (b->layout_kind() == LayoutKind::NULLABLE_ATOMIC_FLAT) {
        // Embedded null marker moves along with its field
        int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
        _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
        _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);

      }
    }
    assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
    b = b->next_block();
  }
}
597
598 LayoutRawBlock* FieldLayout::find_null_marker() {
599 LayoutRawBlock* b = _blocks;
600 while (b != nullptr) {
601 if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
602 return b;
603 }
604 b = b->next_block();
605 }
606 ShouldNotReachHere();
607 }
608
// Removes the NULL_MARKER block from the layout, merging its space into the
// following EMPTY block when possible, otherwise re-tagging the marker block
// itself as EMPTY so the space stays allocatable. Fatal if no marker exists.
void FieldLayout::remove_null_marker() {
  LayoutRawBlock* b = first_field_block();
  while (b != nullptr) {
    if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
      if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
        LayoutRawBlock* n = b->next_block();
        // remove() only unlinks, so reading b's offset/size afterwards is safe
        remove(b);
        n->set_offset(b->offset());
        n->set_size(n->size() + b->size());
      } else {
        b->set_block_kind(LayoutRawBlock::EMPTY);
      }
      return;
    }
    b = b->next_block();
  }
  ShouldNotReachHere(); // if we reach this point, the null marker was not found!
}
627
628 static const char* layout_kind_to_string(LayoutKind lk) {
629 switch(lk) {
630 case LayoutKind::REFERENCE:
631 return "REFERENCE";
632 case LayoutKind::NON_ATOMIC_FLAT:
633 return "NON_ATOMIC_FLAT";
634 case LayoutKind::ATOMIC_FLAT:
635 return "ATOMIC_FLAT";
636 case LayoutKind::NULLABLE_ATOMIC_FLAT:
637 return "NULLABLE_ATOMIC_FLAT";
638 case LayoutKind::UNKNOWN:
639 return "UNKNOWN";
640 default:
641 ShouldNotReachHere();
642 }
643 }
644
// Prints a human-readable dump of the layout, one line per block, in the form
// "@offset KIND size/alignment [name signature ...]". For INHERITED blocks
// the declaring super class hierarchy is searched to recover the field's
// name and signature from its offset.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->block_kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d %s %d/%d \"%s\" %s",
                         b->offset(),
                         "REGULAR",
                         b->size(),
                         b->alignment(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string());
        break;
      }
      case LayoutRawBlock::FLAT: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
        assert(ik != nullptr, "");
        output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
                         b->offset(),
                         "FLAT",
                         b->size(),
                         b->alignment(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         ik->name()->as_C_string(),
                         ik->class_loader_data(), layout_kind_to_string(b->layout_kind()));
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %s %d/-",
                         b->offset(),
                         "RESERVED",
                         b->size());
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Walk up the hierarchy until the field declared at this offset is found
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
              output->print_cr(" @%d %s %d/%d \"%s\" %s",
                               b->offset(),
                               "INHERITED",
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla => FIXME
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string());
              found = true;
              break;
            }
          }
          ik = ik->super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "EMPTY",
                         b->size());
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "PADDING",
                         b->size());
        break;
      case LayoutRawBlock::NULL_MARKER:
      {
        output->print_cr(" @%d %s %d/1 ",
                         b->offset(),
                         "NULL_MARKER",
                         b->size());
        break;
      }
      default:
        fatal("Unknown block type");
    }
    b = b->next_block();
  }
}
732
// The FieldLayoutBuilder drives the whole layout computation for one class:
// it owns the instance and static FieldLayouts, the field groups (default,
// static, @Contended), and the inline-type layout statistics (payload
// size/alignment, null marker offset, atomicity flags), all initialized to
// their "not computed yet" values here.
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type,bool is_abstract_value,
                                       bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
  _classname(classname),
  _loader_data(loader_data),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _inline_layout_info_array(inline_layout_info_array),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _payload_alignment(-1),
  _payload_offset(-1),
  _null_marker_offset(-1),
  _payload_size_in_bytes(-1),
  _non_atomic_layout_size_in_bytes(-1),
  _non_atomic_layout_alignment(-1),
  _atomic_layout_size_in_bytes(-1),
  _nullable_layout_size_in_bytes(-1),
  _fields_size_sum(0),
  _declared_non_static_fields_count(0),
  _has_non_naturally_atomic_fields(false),
  _is_naturally_atomic(false),
  _must_be_atomic(must_be_atomic),
  _has_nonstatic_fields(false),
  _has_inline_type_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _is_abstract_value(is_abstract_value),
  _has_flattening_information(is_inline_type),
  _is_empty_inline_class(false) {}
769
770 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
771 assert(g > 0, "must only be called for named contended groups");
772 FieldGroup* fg = nullptr;
773 for (int i = 0; i < _contended_groups.length(); i++) {
774 fg = _contended_groups.at(i);
775 if (fg->contended_group() == g) return fg;
776 }
777 fg = new FieldGroup(g);
778 _contended_groups.append(fg);
779 return fg;
780 }
781
// Creates the instance and static FieldLayouts and the default field groups,
// seeding instance-layout state (oop map count, has-nonstatic-fields flag)
// from the super class when there is one.
void FieldLayoutBuilder::prologue() {
  _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
  const InstanceKlass* super_klass = _super_klass;
  _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
  _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
  if (super_klass != nullptr) {
    _has_nonstatic_fields = super_klass->has_nonstatic_fields();
  }
  _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
  _static_layout->initialize_static_layout();
  _static_fields = new FieldGroup();
  _root_group = new FieldGroup();
}
795
796 // Field sorting for regular (non-inline) classes:
797 // - fields are sorted in static and non-static fields
798 // - non-static fields are also sorted according to their contention group
799 // (support of the @Contended annotation)
800 // - @Contended annotation is ignored for static fields
801 // - field flattening decisions are taken in this method
void FieldLayoutBuilder::regular_field_sorting() {
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      if (fieldinfo.field_flags().is_contended()) {
        int g = fieldinfo.contended_group();
        if (g == 0) {
          // NOTE(review): 'true' converts to contended group 1 here, giving
          // each default-group @Contended field its own FieldGroup —
          // presumably intentional (isolated padding per field); confirm it
          // cannot collide with a named group numbered 1.
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        group->add_primitive_field(idx, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        // Decide whether this field is stored as a reference or flattened
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);
        // A field counts as an inline-type field if it is null-free, selected
        // for flattening, or declared with a (non-injected) value class type
        if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
            || (!fieldinfo.field_flags().is_injected()
            && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
            && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
          _has_inline_type_fields = true;
          _has_flattening_information = true;
        }
        if (lk == LayoutKind::REFERENCE) {
          if (group != _static_fields) _nonstatic_oopmap_count++;
          group->add_oop_field(idx);
        } else {
          _has_flattening_information = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
          int size, alignment;
          get_size_and_alignment(vk, lk, &size, &alignment);
          group->add_flat_field(idx, vk, lk, size, alignment);
          _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
          // no need to update _must_be_atomic if vk->must_be_atomic() is true because current class is not an inline class
        }
        break;
      }
      default:
        fatal("Something wrong?");
    }
  }
  // Sort each group biggest-first so allocation can place large fields first
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}
876
877 /* Field sorting for inline classes:
878 * - because inline classes are immutable, the @Contended annotation is ignored
879 * when computing their layout (with only read operation, there's no false
880 * sharing issue)
881 * - this method also records the alignment of the field with the most
882 * constraining alignment, this value is then used as the alignment
883 * constraint when flattening this inline type into another container
884 * - field flattening decisions are taken in this method (those decisions are
885 * currently only based in the size of the fields to be flattened, the size
886 * of the resulting instance is not considered)
887 */
void FieldLayoutBuilder::inline_class_field_sorting() {
  assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
  // Tracks the most constraining alignment across non-static fields; becomes
  // the payload alignment of this inline class
  int alignment = -1;
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    int field_alignment = 1;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      _declared_non_static_fields_count++;
      group = _root_group;
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        if (group != _static_fields) {
          field_alignment = type2aelembytes(type); // alignment == size for primitive types
        }
        group->add_primitive_field(fieldinfo.index(), type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);
        // A field counts as an inline-type field if it is null-free, selected
        // for flattening, or declared with a (non-injected) value class type
        if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
            || (!fieldinfo.field_flags().is_injected()
            && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
            && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
          _has_inline_type_fields = true;
          _has_flattening_information = true;
        }
        if (lk == LayoutKind::REFERENCE) {
          if (group != _static_fields) {
            _nonstatic_oopmap_count++;
            field_alignment = type2aelembytes(type); // alignment == size for oops
          }
          group->add_oop_field(idx);
        } else {
          _has_flattening_information = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
          if (!vk->is_naturally_atomic()) _has_non_naturally_atomic_fields = true;
          // NOTE(review): this 'alignment' shadows the outer accumulator above;
          // it is the flat field's own alignment, copied to field_alignment below
          int size, alignment;
          get_size_and_alignment(vk, lk, &size, &alignment);
          group->add_flat_field(idx, vk, lk, size, alignment);
          _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          field_alignment = alignment;
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
        }
        break;
      }
      default:
        fatal("Unexpected BasicType");
    }
    if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
  }
  _payload_alignment = alignment;
  assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
}
960
961 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
962 if (ContendedPaddingWidth > 0) {
963 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
964 _layout->insert(slot, padding);
965 }
966 }
967
968 // Computation of regular classes layout is an evolution of the previous default layout
969 // (FieldAllocationStyle 1):
970 // - primitive fields (both primitive types and flat inline types) are allocated
971 // first (from the biggest to the smallest)
972 // - oop fields are allocated, either in existing gaps or at the end of
973 // the layout. We allocate oops in a single block to have a single oop map entry.
974 // - if the super class ended with an oop, we lead with oops. That will cause the
975 // trailing oop map entry of the super class and the oop map entry of this class
976 // to be folded into a single entry later. Correspondingly, if the super class
977 // ends with a primitive field, we gain nothing by leading with oops; therefore
978 // we let oop fields trail, thus giving future derived classes the chance to apply
979 // the same trick.
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();
  if (_is_contended) {
    // The whole class is @Contended: pad before the first declared field and
    // remember to pad after the last one as well (need_tail_padding).
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }

  // Allocation order (see the comment above this method): lead with oops only
  // when the super class layout ends with an oop, so that the trailing oop map
  // entry of the super class and this class' entry can be folded later.
  if (_super_ends_with_oop) {
    _layout->add(_root_group->oop_fields());
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
  } else {
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
    _layout->add(_root_group->oop_fields());
  }

  // Each @Contended group gets its own padded area, isolated from the rest of
  // the instance and from the other groups.
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      insert_contended_padding(start);
      _layout->add(cg->big_primitive_fields());
      _layout->add(cg->small_primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  epilogue();
}
1025
1026 /* Computation of inline classes has a slightly different strategy than for
1027 * regular classes. Regular classes have their oop fields allocated at the end
 * of the layout to increase GC performance. Unfortunately, this strategy
1029 * increases the number of empty slots inside an instance. Because the purpose
1030 * of inline classes is to be embedded into other containers, it is critical
1031 * to keep their size as small as possible. For this reason, the allocation
1032 * strategy is:
1033 * - big primitive fields (primitive types and flat inline type smaller
1034 * than an oop) are allocated first (from the biggest to the smallest)
1035 * - then oop fields
1036 * - then small primitive fields (from the biggest to the smallest)
1037 */
// Computes the layout of a value class (concrete or abstract) following the
// strategy described in the comment above. Besides the raw payload layout,
// this method also derives the characteristics of the optional non-atomic,
// atomic, and nullable flat layouts used when the class is flattened into
// other containers.
void FieldLayoutBuilder::compute_inline_class_layout() {

  // Test if the concrete inline class is an empty class (no instance fields)
  // and insert a dummy field if needed
  if (!_is_abstract_value) {
    bool declares_non_static_fields = false;
    for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it) {
      FieldInfo fieldinfo = *it;
      if (!fieldinfo.access_flags().is_static()) {
        declares_non_static_fields = true;
        break;
      }
    }
    if (!declares_non_static_fields) {
      // No declared instance fields: walk the whole super class chain looking
      // for inherited instance fields before declaring the class empty
      bool has_inherited_fields = false;
      const InstanceKlass* super = _super_klass;
      while(super != nullptr) {
        if (super->has_nonstatic_fields()) {
          has_inherited_fields = true;
          break;
        }
        super = super->super() == nullptr ? nullptr : InstanceKlass::cast(super->super());
      }

      if (!has_inherited_fields) {
        // Inject ".empty" dummy field
        _is_empty_inline_class = true;
        FieldInfo::FieldFlags fflags(0);
        fflags.update_injected(true);
        AccessFlags aflags;
        FieldInfo fi(aflags,
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
                     0,
                     fflags);
        int idx = _field_info->append(fi);
        _field_info->adr_at(idx)->set_index(idx);
      }
    }
  }

  prologue();
  inline_class_field_sorting();

  assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");

  if (_layout->super_has_fields() && !_is_abstract_value) { // non-static field layout
    if (!_has_nonstatic_fields) {
      // NOTE(review): this assert looks unreachable — the enclosing branch
      // requires !_is_abstract_value; confirm it only documents impossibility
      assert(_is_abstract_value, "Concrete value types have at least one field");
      // Nothing to do
    } else {
      // decide which alignment to use, then set first allowed field offset

      assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
      assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");

      if (_payload_alignment < _layout->super_alignment()) {
        // Super payload is more aligned than this class needs: relax to the
        // smallest alignment that still satisfies the super class' minimum
        int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
        assert(new_alignment % _payload_alignment == 0, "Must be");
        assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
        _payload_alignment = new_alignment;
      }
      _layout->set_start(_layout->first_field_block());
    }
  } else {
    if (_is_abstract_value && _has_nonstatic_fields) {
      _payload_alignment = type2aelembytes(BasicType::T_LONG);
    }
    assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY || !UseCompressedClassPointers, "Unexpected");
    LayoutRawBlock* first_empty = _layout->start()->next_block();
    if (first_empty->offset() % _payload_alignment != 0) {
      // Pad so the first field lands on a payload-aligned offset
      LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
      _layout->insert(first_empty, padding);
      if (first_empty->size() == 0) {
        _layout->remove(first_empty);
      }
      _layout->set_start(padding);
    }
  }

  // Inline class allocation order: big primitives, then oops, then small
  // primitives (see the strategy comment above this method)
  _layout->add(_root_group->big_primitive_fields());
  _layout->add(_root_group->oop_fields());
  _layout->add(_root_group->small_primitive_fields());

  LayoutRawBlock* first_field = _layout->first_field_block();
  if (first_field != nullptr) {
    _payload_offset = _layout->first_field_block()->offset();
    _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
  } else {
    // A concrete value class would at least have the injected dummy field
    assert(_is_abstract_value, "Concrete inline types must have at least one field");
    _payload_offset = _layout->blocks()->size();
    _payload_size_in_bytes = 0;
  }

  // Determining if the value class is naturally atomic:
  if ((!_layout->super_has_fields() && _declared_non_static_fields_count <= 1 && !_has_non_naturally_atomic_fields)
      || (_layout->super_has_fields() && _super_klass->is_naturally_atomic() && _declared_non_static_fields_count == 0)) {
    _is_naturally_atomic = true;
  }

  // At this point, the characteristics of the raw layout (used in standalone instances) are known.
  // From this, additional layouts will be computed: atomic and nullable layouts
  // Once those additional layouts are computed, the raw layout might need some adjustments

  bool vm_uses_flattening = UseFieldFlattening || UseArrayFlattening;

  if (!_is_abstract_value && vm_uses_flattening) { // Flat layouts are only for concrete value classes
    // Validation of the non atomic layout
    if (UseNonAtomicValueFlattening && !AlwaysAtomicAccesses && (!_must_be_atomic || _is_naturally_atomic)) {
      _non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
      _non_atomic_layout_alignment = _payload_alignment;
    }

    // Next step is to compute the characteristics for a layout enabling atomic updates
    if (UseAtomicValueFlattening) {
      // Atomic access needs a power-of-two size no larger than the platform's
      // largest atomic operation
      int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
      if (atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
        _atomic_layout_size_in_bytes = atomic_size;
      }
    }

    // Next step is the nullable layout: the layout must include a null marker and must also be atomic
    if (UseNullableValueFlattening) {
      // Looking if there's an empty slot inside the layout that could be used to store a null marker
      // FIXME: could it be possible to re-use the .empty field as a null marker for empty values?
      LayoutRawBlock* b = _layout->first_field_block();
      assert(b != nullptr, "A concrete value class must have at least one (possible dummy) field");
      int null_marker_offset = -1;
      if (_is_empty_inline_class) {
        // Reusing the dummy field as a null marker
        assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
        null_marker_offset = b->offset();
      } else {
        // Scan for an EMPTY block (a gap) inside the payload
        while (b != _layout->last_block()) {
          if (b->block_kind() == LayoutRawBlock::EMPTY) {
            break;
          }
          b = b->next_block();
        }
        if (b != _layout->last_block()) {
          // found an empty slot, register its offset from the beginning of the payload
          null_marker_offset = b->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->add_field_at_offset(marker, b->offset());
        }
        if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
          int last_offset = _layout->last_block()->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->insert_field_block(_layout->last_block(), marker);
          assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
          null_marker_offset = marker->offset();
        }
      }

      // Now that the null marker is there, the size of the nullable layout must be computed (remember, must be atomic too)
      int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
      int nullable_size = round_up_power_of_2(new_raw_size);
      if (nullable_size <= (int)MAX_ATOMIC_OP_SIZE) {
        _nullable_layout_size_in_bytes = nullable_size;
        _null_marker_offset = null_marker_offset;
      } else {
        // If the nullable layout is rejected, the NULL_MARKER block should be removed
        // from the layout, otherwise it will appear anyway if the layout is printed
        if (!_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
        }
        _null_marker_offset = -1;
      }
    }
    // If the inline class has an atomic or nullable (which is also atomic) layout,
    // we want the raw layout to have the same alignment as those atomic layouts so access codes
    // could remain simple (single instruction without intermediate copy). This might require
    // to shift all fields in the raw layout, but this operation is possible only if the class
    // doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
    // field shift is needed but not possible, all atomic layouts are disabled and only reference
    // and loosely consistent are supported.
    int required_alignment = _payload_alignment;
    if (has_atomic_layout() && _payload_alignment < atomic_layout_size_in_bytes()) {
      required_alignment = atomic_layout_size_in_bytes();
    }
    if (has_nullable_atomic_layout() && _payload_alignment < nullable_layout_size_in_bytes()) {
      required_alignment = nullable_layout_size_in_bytes();
    }
    int shift = first_field->offset() % required_alignment;
    if (shift != 0) {
      if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
        assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possible dummy) field");
        _layout->shift_fields(shift);
        _payload_offset = _layout->first_field_block()->offset();
        if (has_nullable_atomic_layout()) {
          assert(!_is_empty_inline_class, "Should not get here with empty values");
          _null_marker_offset = _layout->find_null_marker()->offset();
        }
        _payload_alignment = required_alignment;
      } else {
        // Cannot shift fields: give up all atomic flat layouts
        _atomic_layout_size_in_bytes = -1;
        if (has_nullable_atomic_layout() && !_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
        }
        _nullable_layout_size_in_bytes = -1;
        _null_marker_offset = -1;
      }
    } else {
      _payload_alignment = required_alignment;
    }

    // If the inline class has a nullable layout, the layout used in heap allocated standalone
    // instances must also be the nullable layout, in order to be able to set the null marker to
    // non-null before copying the payload to other containers.
    if (has_nullable_atomic_layout() && payload_layout_size_in_bytes() < nullable_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_layout_size_in_bytes();
    }
  }
  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  if (UseAltSubstitutabilityMethod) {
    generate_acmp_maps();
  }
  epilogue();
}
1261
1262 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1263 InlineKlass* vklass, int offset) {
1264 int diff = offset - vklass->payload_offset();
1265 const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1266 const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1267 while (map < last_map) {
1268 nonstatic_oop_maps->add(map->offset() + diff, map->count());
1269 map++;
1270 }
1271 }
1272
1273 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1274 if (list == nullptr) return;
1275 for (int i = 0; i < list->length(); i++) {
1276 LayoutRawBlock* f = list->at(i);
1277 if (f->block_kind() == LayoutRawBlock::FLAT) {
1278 InlineKlass* vk = f->inline_klass();
1279 assert(vk != nullptr, "Should have been initialized");
1280 if (vk->contains_oops()) {
1281 add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1282 }
1283 }
1284 }
1285 }
1286
1287 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1288 if (group->oop_fields() != nullptr) {
1289 for (int i = 0; i < group->oop_fields()->length(); i++) {
1290 LayoutRawBlock* b = group->oop_fields()->at(i);
1291 nonstatic_oop_maps->add(b->offset(), 1);
1292 }
1293 }
1294 register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1295 register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
1296 }
1297
1298 static int insert_segment(GrowableArray<Pair<int,int>>* map, int offset, int size, int last_idx) {
1299 if (map->is_empty()) {
1300 return map->append(Pair<int,int>(offset, size));
1301 }
1302 last_idx = last_idx == -1 ? 0 : last_idx;
1303 int start = map->adr_at(last_idx)->first > offset ? 0 : last_idx;
1304 bool inserted = false;
1305 for (int c = start; c < map->length(); c++) {
1306 if (offset == (map->adr_at(c)->first + map->adr_at(c)->second)) {
1307 //contiguous to the last field, can be coalesced
1308 map->adr_at(c)->second = map->adr_at(c)->second + size;
1309 inserted = true;
1310 break; // break out of the for loop
1311 }
1312 if (offset < (map->adr_at(c)->first)) {
1313 map->insert_before(c, Pair<int,int>(offset, size));
1314 last_idx = c;
1315 inserted = true;
1316 break; // break out of the for loop
1317 }
1318 }
1319 if (!inserted) {
1320 last_idx = map->append(Pair<int,int>(offset, size));
1321 }
1322 return last_idx;
1323 }
1324
// Merges the precomputed acmp maps of klass ik into the maps under
// construction. The source maps are stored as one int array attached to ik's
// java mirror (at acmp_maps_offset()); the loops below establish its format:
// element 0 holds the number of non-oop segments, followed by that many
// <offset,size> pairs, followed by the offsets of ik's oop fields up to the
// end of the array. Each offset is rebased by (offset - payload_offset) so
// the entries land at the position of the flat field / super class in the
// new layout. Returns the updated insertion hint for insert_segment().
static int insert_map_at_offset(GrowableArray<Pair<int,int>>* nonoop_map, GrowableArray<int>* oop_map,
                                const InstanceKlass* ik, int offset, int payload_offset, int last_idx) {
  oop mirror = ik->java_mirror();
  oop array = mirror->obj_field(ik->acmp_maps_offset());
  assert(array != nullptr, "Sanity check");
  typeArrayOop fmap = (typeArrayOop)array;
  // Handlize the array so the reference stays valid while iterating
  typeArrayHandle fmap_h(Thread::current(), fmap);
  int nb_nonoop_field = fmap_h->int_at(0);
  int field_offset = offset - payload_offset;  // rebasing delta
  for (int i = 0; i < nb_nonoop_field; i++) {
    last_idx = insert_segment(nonoop_map,
                              field_offset + fmap_h->int_at( i * 2 + 1),
                              fmap_h->int_at( i * 2 + 2), last_idx);
  }
  int len = fmap_h->length();
  // Remaining entries are the offsets of oop fields
  for (int i = nb_nonoop_field * 2 + 1; i < len; i++) {
    oop_map->append(field_offset + fmap_h->int_at(i));
  }
  return last_idx;
}
1345
1346 static void split_after(GrowableArray<Pair<int,int>>* map, int idx, int head) {
1347 int offset = map->adr_at(idx)->first;
1348 int size = map->adr_at(idx)->second;
1349 if (size <= head) return;
1350 map->adr_at(idx)->first = offset + head;
1351 map->adr_at(idx)->second = size - head;
1352 map->insert_before(idx, Pair<int,int>(offset, head));
1353
1354 }
1355
// Builds the acmp (substitutability) maps for a value class:
// _nonoop_acmp_map holds <offset,size> segments of non-oop payload bytes,
// _oop_acmp_map holds the offsets of oop fields. Inherited and flat fields
// contribute through the precomputed maps of their classes
// (insert_map_at_offset). Segments are finally split on 8/4/2-byte
// boundaries so each piece can be compared with a naturally aligned access.
void FieldLayoutBuilder::generate_acmp_maps() {
  assert(_is_inline_type || _is_abstract_value, "Must be done only for value classes (abstract or not)");

  // create/initialize current class' maps
  // The Pair<int,int> values in the nonoop_acmp_map represent <offset,size> segments of memory
  _nonoop_acmp_map = new GrowableArray<Pair<int,int>>();
  _oop_acmp_map = new GrowableArray<int>();
  if (_is_empty_inline_class) return;  // nothing to compare in an empty value
  // last_idx remembers the position of the last insertion in order to speed up the next insertion.
  // Local fields are processed in ascending offset order, so an insertion is very likely to be performed
  // next to the previous insertion. However, in some cases local fields and inherited fields can be
  // interleaved, in which case the search of the insertion position cannot depend on the previous insertion.
  int last_idx = -1;
  if (_super_klass != nullptr && _super_klass != vmClasses::Object_klass()) { // Assumes j.l.Object cannot have fields
    last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, _super_klass, 0, 0, last_idx);
  }

  // Processing local fields
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    switch(b->block_kind()) {
      case LayoutRawBlock::RESERVED:
      case LayoutRawBlock::EMPTY:
      case LayoutRawBlock::PADDING:
      case LayoutRawBlock::NULL_MARKER:
      case LayoutRawBlock::INHERITED: // inherited fields are handled during maps creation/initialization
        // skip
        break;

      case LayoutRawBlock::REGULAR:
      {
        // A regular field is an oop iff its signature is a class ("L...") or
        // array ("[...") descriptor
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        if (fi->signature(_constant_pool)->starts_with("L") || fi->signature(_constant_pool)->starts_with("[")) {
          _oop_acmp_map->append(b->offset());
        } else {
          // Non-oop case
          last_idx = insert_segment(_nonoop_acmp_map, b->offset(), b->size(), last_idx);
        }
        break;
      }
      case LayoutRawBlock::FLAT:
      {
        // A flat field contributes the (rebased) maps of its inline class
        InlineKlass* vk = b->inline_klass();
        last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, vk, b->offset(), vk->payload_offset(), last_idx);
        if (b->layout_kind() == LayoutKind::NULLABLE_ATOMIC_FLAT) {
          int null_marker_offset = b->offset() + vk->null_marker_offset_in_payload();
          last_idx = insert_segment(_nonoop_acmp_map, null_marker_offset, 1, last_idx);
          // Important note: the implementation assumes that for nullable flat fields, if the
          // null marker is zero (field is null), then all the fields of the flat field are also
          // zeroed. So, nullable flat field are not encoded different than null-free flat fields,
          // all fields are included in the map, plus the null marker
          // If it happens that the assumption above is wrong, then nullable flat fields would
          // require a dedicated section in the acmp map, and be handled differently: null_marker
          // comparison first, and if null markers are identical and non-zero, then conditional
          // comparison of the other fields
        }
      }
      break;

    }
    b = b->next_block();
  }

  // split segments into well-aligned blocks
  // After a split, idx advances to the remainder, which is re-examined (and
  // possibly split again) on the next iteration
  int idx = 0;
  while (idx < _nonoop_acmp_map->length()) {
    int offset = _nonoop_acmp_map->adr_at(idx)->first;
    int size = _nonoop_acmp_map->adr_at(idx)->second;
    int mod = offset % 8;
    switch (mod) {
      case 0:
        break;
      case 4:
        split_after(_nonoop_acmp_map, idx, 4);
        break;
      case 2:
      case 6:
        split_after(_nonoop_acmp_map, idx, 2);
        break;
      case 1:
      case 3:
      case 5:
      case 7:
        split_after(_nonoop_acmp_map, idx, 1);
        break;
      default:
        ShouldNotReachHere();
    }
    idx++;
  }
}
1447
// Finalizes the layout computation: builds the nonstatic oop maps, computes
// the instance and static field sizes, publishes all the results through
// _info for InstanceKlass creation, verifies (debug only) that field offsets
// in _field_info match the layout blocks, and optionally prints the layout.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(_nonstatic_oopmap_count);
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  if (super_oop_map_count > 0) {
    // Start from the super class' oop maps so inherited oops are covered
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
                                                    _super_klass->nonstatic_oop_map_count());
  }
  register_embedded_oops(nonstatic_oop_maps, _root_group);
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        register_embedded_oops(nonstatic_oop_maps, cg);
      }
    }
  }
  // Fold adjacent entries into as few oop map blocks as possible
  nonstatic_oop_maps->compact();

  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
                            InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;
  _info->_has_inline_fields = _has_inline_type_fields;
  _info->_is_naturally_atomic = _is_naturally_atomic;
  if (_is_inline_type) {
    // Extra layout characteristics only meaningful for concrete value classes
    _info->_must_be_atomic = _must_be_atomic;
    _info->_payload_alignment = _payload_alignment;
    _info->_payload_offset = _payload_offset;
    _info->_payload_size_in_bytes = _payload_size_in_bytes;
    _info->_non_atomic_size_in_bytes = _non_atomic_layout_size_in_bytes;
    _info->_non_atomic_alignment = _non_atomic_layout_alignment;
    _info->_atomic_layout_size_in_bytes = _atomic_layout_size_in_bytes;
    _info->_nullable_layout_size_in_bytes = _nullable_layout_size_in_bytes;
    _info->_null_marker_offset = _null_marker_offset;
    _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
    _info->_is_empty_inline_klass = _is_empty_inline_class;
  }

  // Acmp maps are needed for both concrete and abstract value classes
  if (UseAltSubstitutabilityMethod && (_is_inline_type || _is_abstract_value)) {
    _info->_acmp_maps_offset = _static_layout->acmp_maps_offset();
    _info->_nonoop_acmp_map = _nonoop_acmp_map;
    _info->_oop_acmp_map = _oop_acmp_map;
  }

  // This may be too restrictive, since if all the fields fit in 64
  // bits we could make the decision to align instances of this class
  // to 64-bit boundaries, and load and store them as single words.
  // And on machines which supported larger atomics we could similarly
  // allow larger values to be atomic, if properly aligned.

#ifdef ASSERT
  // Tests verifying integrity of field layouts are using the output of -XX:+PrintFieldLayout
  // which prints the details of LayoutRawBlocks used to compute the layout.
  // The code below checks that offsets in the _field_info meta-data match offsets
  // in the LayoutRawBlocks
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
        tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
      }
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
  // Same check for the static layout
  b = _static_layout->blocks();
  while(b != _static_layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
#endif // ASSERT

  // Print the log format header only once per VM run
  static bool first_layout_print = true;


  if (PrintFieldLayout || (PrintInlineLayout && _has_flattening_information)) {
    ResourceMark rm;
    stringStream st;
    if (first_layout_print) {
      st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
      st.print_cr("Heap oop size = %d", heapOopSize);
      first_layout_print = false;
    }
    if (_super_klass != nullptr) {
      st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
                  _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
    } else {
      st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
    }
    st.print_cr("Instance fields:");
    _layout->print(&st, false, _super_klass, _inline_layout_info_array);
    st.print_cr("Static fields:");
    _static_layout->print(&st, true, nullptr, _inline_layout_info_array);
    st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    if (_is_inline_type) {
      st.print_cr("First field offset = %d", _payload_offset);
      st.print_cr("Payload layout: %d/%d", _payload_size_in_bytes, _payload_alignment);
      if (has_non_atomic_flat_layout()) {
        st.print_cr("Non atomic flat layout: %d/%d", _non_atomic_layout_size_in_bytes, _non_atomic_layout_alignment);
      } else {
        st.print_cr("Non atomic flat layout: -/-");
      }
      if (has_atomic_layout()) {
        st.print_cr("Atomic flat layout: %d/%d", _atomic_layout_size_in_bytes, _atomic_layout_size_in_bytes);
      } else {
        st.print_cr("Atomic flat layout: -/-");
      }
      if (has_nullable_atomic_layout()) {
        st.print_cr("Nullable flat layout: %d/%d", _nullable_layout_size_in_bytes, _nullable_layout_size_in_bytes);
      } else {
        st.print_cr("Nullable flat layout: -/-");
      }
      if (_null_marker_offset != -1) {
        st.print_cr("Null marker offset = %d", _null_marker_offset);
      }
      if (UseAltSubstitutabilityMethod) {
        st.print("Non-oop acmp map: ");
        for (int i = 0 ; i < _nonoop_acmp_map->length(); i++) {
          st.print("<%d,%d>, ", _nonoop_acmp_map->at(i).first, _nonoop_acmp_map->at(i).second);
        }
        st.print_cr("");
        st.print("oop acmp map: ");
        for (int i = 0 ; i < _oop_acmp_map->length(); i++) {
          st.print("%d, ", _oop_acmp_map->at(i));
        }
        st.print_cr("");
      }
    }
    st.print_cr("---");
    // Print output all together.
    tty->print_raw(st.as_string());
  }
}
1596
1597 void FieldLayoutBuilder::build_layout() {
1598 if (_is_inline_type || _is_abstract_value) {
1599 compute_inline_class_layout();
1600 } else {
1601 compute_regular_layout();
1602 }
1603 }
|