7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classFileParser.hpp"
26 #include "classfile/fieldLayoutBuilder.hpp"
27 #include "jvm.h"
28 #include "memory/resourceArea.hpp"
29 #include "oops/array.hpp"
30 #include "oops/fieldStreams.inline.hpp"
31 #include "oops/instanceKlass.inline.hpp"
32 #include "oops/instanceMirrorKlass.hpp"
33 #include "oops/klass.inline.hpp"
34 #include "runtime/fieldDescriptor.inline.hpp"
35
36
// Constructor for blocks that do not represent a declared field:
// EMPTY (free space), RESERVED (object header / mirror header area),
// PADDING and INHERITED blocks. Such blocks carry no field index and
// default to 1-byte alignment.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _block_kind(kind),
  _offset(-1),          // offset is assigned later, when the block is placed in a layout
  _alignment(1),
  _size(size),
  _field_index(-1),     // no field is associated with this block
  _is_reference(false) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
50
51
// Constructor for blocks that represent a declared field (REGULAR or
// FLATTENED) or a field inherited from a super class (INHERITED).
// `index` is the field's index in the FieldInfo table, `is_reference`
// tells whether the field holds an oop (used to build oop maps).
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _block_kind(kind),
  _offset(-1),          // offset is assigned later, when the block is placed in a layout
  _alignment(alignment),
  _size(size),
  _field_index(index),
  _is_reference(is_reference) {
  assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
66
67 bool LayoutRawBlock::fit(int size, int alignment) {
68 int adjustment = 0;
69 if ((_offset % alignment) != 0) {
70 adjustment = alignment - (_offset % alignment);
71 }
72 return _size >= size + adjustment;
73 }
74
// A FieldGroup is a set of fields allocated together: the root group, the
// static group, or one group per @Contended class/group. Field lists are
// allocated lazily by add_primitive_field()/add_oop_field().
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _primitive_fields(nullptr),
  _oop_fields(nullptr),
  _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
81
82 void FieldGroup::add_primitive_field(int idx, BasicType type) {
83 int size = type2aelembytes(type);
84 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
85 if (_primitive_fields == nullptr) {
86 _primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
87 }
88 _primitive_fields->append(block);
89 }
90
91 void FieldGroup::add_oop_field(int idx) {
92 int size = type2aelembytes(T_OBJECT);
93 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
94 if (_oop_fields == nullptr) {
95 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
96 }
97 _oop_fields->append(block);
98 _oop_count++;
99 }
100
101 void FieldGroup::sort_by_size() {
102 if (_primitive_fields != nullptr) {
103 _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
104 }
105 }
106
// A FieldLayout is a doubly-linked list of LayoutRawBlocks describing one
// layout (instance or static). `_start` marks the first block where new
// fields may be inserted; `_last` is the trailing EMPTY block. Both are
// nullptr here until initialize_static_layout()/initialize_instance_layout()
// builds the list.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, ConstantPool* cp) :
  _field_info(field_info),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),
  _last(_blocks) {}
113
// Sets up an empty layout for static fields: one unbounded EMPTY block at
// offset 0, optionally preceded by a RESERVED block covering the
// java.lang.Class instance's own (non-static) fields.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
128
// Sets up the layout for instance fields. Without a super class, the layout
// is a RESERVED block for the object header followed by an unbounded EMPTY
// block. With a super class, the super's layout is reconstructed first and
// `_start` decides whether new fields may fill the super's holes or must be
// appended after it (the latter when the super is @Contended and has fields).
// `super_ends_with_oop` reports whether the super's last field is an oop.
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
  if (super_klass == nullptr) {
    super_ends_with_oop = false;
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    bool super_has_instance_fields = false;
    reconstruct_layout(super_klass, super_has_instance_fields, super_ends_with_oop);
    fill_holes(super_klass);
    if (!super_klass->has_contended_annotations() || !super_has_instance_fields) {
      _start = _blocks; // start allocating fields from the first empty block
    } else {
      _start = _last; // append fields at the end of the reconstructed layout
    }
  }
}
148
149 LayoutRawBlock* FieldLayout::first_field_block() {
150 LayoutRawBlock* block = _start;
151 while (block->block_kind() != LayoutRawBlock::INHERITED && block->block_kind() != LayoutRawBlock::REGULAR
152 && block->block_kind() != LayoutRawBlock::FLATTENED && block->block_kind() != LayoutRawBlock::PADDING) {
153 block = block->next_block();
154 }
155 return block;
156 }
157
158
159 // Insert a set of fields into a layout using a best-fit strategy.
160 // For each field, search for the smallest empty slot able to fit the field
161 // (satisfying both size and alignment requirements), if none is found,
162 // add the field at the end of the layout.
163 // Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;

    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements.
      // The walk goes backward from the block before the trailing EMPTY block down to
      // `start`, keeping the smallest fitting EMPTY block found (best fit).
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;
      while (cursor != start) {
        if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No fitting hole: fall back to appending in the trailing EMPTY block.
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }

    insert_field_block(candidate, b);
  }
}
212
// Used for classes with hard-coded field offsets: insert a field at the specified offset
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  // Walk the layout looking for the block that covers the requested offset
  // (or the trailing block), then carve the field out of that EMPTY slot.
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      // NOTE(review): this bound looks stronger than necessary when slot->offset() > 0;
      // the room actually required is (block->offset() - slot->offset()) + block->size().
      // Confirm against the callers that use hard-coded offsets.
      assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Keep the bytes before the requested offset as a smaller EMPTY block.
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        // The field consumed the whole slot; drop the now-empty remainder.
        remove(slot);
      }
      // Publish the field's final offset in the FieldInfo table.
      _field_info->adr_at(block->field_index())->set_offset(block->offset());
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
242
// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }
  // NOTE(review): the best-fit search that declares and initializes `first`
  // and `candidate` appears to be missing from this excerpt (the brace
  // structure below is unbalanced); confirm against the full source before
  // modifying this function.
  assert(candidate != nullptr, "Candidate must not be null");
  assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
  assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  // Insert every field of the set back-to-back into the chosen slot.
  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
282
283 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
284 assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
285 if (slot->offset() % block->alignment() != 0) {
286 int adjustment = block->alignment() - (slot->offset() % block->alignment());
287 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
288 insert(slot, adj);
289 }
290 insert(slot, block);
291 if (slot->size() == 0) {
292 remove(slot);
293 }
294 _field_info->adr_at(block->field_index())->set_offset(block->offset());
295 return block;
296 }
297
298 void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_instance_fields, bool& ends_with_oop) {
299 has_instance_fields = ends_with_oop = false;
300 GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
301 BasicType last_type;
302 int last_offset = -1;
303 while (ik != nullptr) {
304 for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
305 BasicType type = Signature::basic_type(fs.signature());
306 // distinction between static and non-static fields is missing
307 if (fs.access_flags().is_static()) continue;
308 has_instance_fields = true;
309 if (fs.offset() > last_offset) {
310 last_offset = fs.offset();
311 last_type = type;
312 }
313 int size = type2aelembytes(type);
314 // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
315 LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
316 block->set_offset(fs.offset());
317 all_fields->append(block);
318 }
319 ik = ik->super() == nullptr ? nullptr : ik->super();
320 }
321 assert(last_offset == -1 || last_offset > 0, "Sanity");
322 if (last_offset > 0 &&
323 (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
324 ends_with_oop = true;
325 }
326
327 all_fields->sort(LayoutRawBlock::compare_offset);
328 _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
329 _blocks->set_offset(0);
330 _last = _blocks;
331
332 for(int i = 0; i < all_fields->length(); i++) {
333 LayoutRawBlock* b = all_fields->at(i);
334 _last->set_next_block(b);
335 b->set_prev_block(_last);
336 _last = b;
337 }
338 _start = _blocks;
339 }
340
341 // Called during the reconstruction of a layout, after fields from super
342 // classes have been inserted. It fills unused slots between inserted fields
343 // with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY blocks, to prevent subclasses'
// fields from interfering with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  // Holes in a @Contended super class become PADDING (never reusable);
  // otherwise they become EMPTY blocks that new fields may fill.
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      // Gap between b and its successor: splice a filler block in between.
      int size = b->next_block()->offset() - (b->offset() + b->size());
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");

  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Terminate the layout with an unbounded EMPTY block where new fields
  // can always be appended.
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
385
// Inserts `block` immediately before the EMPTY `slot`, shrinking the slot by
// the block's size. The caller must guarantee the slot's offset satisfies
// the block's alignment (insert_field_block() adds padding when it doesn't).
// Updates the list head if the slot was the head. Returns the inserted block.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Link the block between slot->prev_block() and slot.
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  if (_blocks == slot) {
    _blocks = block;
  }
  return block;
}
405
// Unlinks `block` from the layout's doubly-linked list. The trailing block
// (_last) is never removed. If the removed block was the insertion start
// point, _start falls back to the block's predecessor.
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    // Removing the head: promote its successor.
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    _start = block->prev_block();
  }
}
423
// Prints the layout block by block for -XX:+PrintFieldLayout. Each line shows
// "@offset name signature size/alignment KIND". For INHERITED blocks the
// field name/signature is looked up in the super class hierarchy by offset.
// The trailing EMPTY block (_last) is deliberately not printed.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->block_kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "REGULAR");
        break;
      }
      case LayoutRawBlock::FLATTENED: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "FLATTENED");
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %d/- %s",
                         b->offset(),
                         b->size(),
                         "RESERVED");
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        // Inherited blocks carry only an offset; recover the field's name and
        // signature by scanning the super class hierarchy for a matching offset.
        bool found = false;
        const InstanceKlass* ik = super;
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset()) {
              output->print_cr(" @%d \"%s\" %s %d/%d %s",
                               b->offset(),
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string(),
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla
                               "INHERITED");
              found = true;
              break;
            }
          }
          ik = ik->super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "EMPTY");
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "PADDING");
        break;
    }
    b = b->next_block();
  }
}
497
// Gathers the inputs needed to compute both the instance and the static
// field layouts of a class being loaded. The layouts themselves are created
// lazily in prologue().
// NOTE(review): _super_ends_with_oop is not initialized here; it is set by
// prologue() via initialize_instance_layout() — confirm it is never read
// before prologue() runs.
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, FieldLayoutInfo* info) :
  _classname(classname),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _alignment(-1),
  _has_nonstatic_fields(false),
  _is_contended(is_contended) {}
514
515
516 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
517 assert(g > 0, "must only be called for named contended groups");
518 FieldGroup* fg = nullptr;
519 for (int i = 0; i < _contended_groups.length(); i++) {
520 fg = _contended_groups.at(i);
521 if (fg->contended_group() == g) return fg;
522 }
523 fg = new FieldGroup(g);
524 _contended_groups.append(fg);
525 return fg;
526 }
527
// Creates and initializes both layouts (instance and static) and the default
// field groups. Also captures whether the class inherits non-static fields
// and whether the super class layout ends with an oop (_super_ends_with_oop).
void FieldLayoutBuilder::prologue() {
  _layout = new FieldLayout(_field_info, _constant_pool);
  const InstanceKlass* super_klass = _super_klass;
  _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
  if (super_klass != nullptr) {
    _has_nonstatic_fields = super_klass->has_nonstatic_fields();
  }
  _static_layout = new FieldLayout(_field_info, _constant_pool);
  _static_layout->initialize_static_layout();
  _static_fields = new FieldGroup();
  _root_group = new FieldGroup();
}
540
541 // Field sorting for regular classes:
542 // - fields are sorted in static and non-static fields
543 // - non-static fields are also sorted according to their contention group
544 // (support of the @Contended annotation)
545 // - @Contended annotation is ignored for static fields
546 void FieldLayoutBuilder::regular_field_sorting() {
547 int idx = 0;
548 for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
549 FieldInfo ctrl = _field_info->at(0);
550 FieldGroup* group = nullptr;
551 FieldInfo fieldinfo = *it;
552 if (fieldinfo.access_flags().is_static()) {
553 group = _static_fields;
554 } else {
555 _has_nonstatic_fields = true;
556 if (fieldinfo.field_flags().is_contended()) {
557 int g = fieldinfo.contended_group();
558 if (g == 0) {
559 group = new FieldGroup(true);
560 _contended_groups.append(group);
561 } else {
562 group = get_or_create_contended_group(g);
563 }
564 } else {
565 group = _root_group;
566 }
567 }
568 assert(group != nullptr, "invariant");
569 BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
570 switch(type) {
571 case T_BYTE:
572 case T_CHAR:
573 case T_DOUBLE:
574 case T_FLOAT:
575 case T_INT:
576 case T_LONG:
577 case T_SHORT:
578 case T_BOOLEAN:
579 group->add_primitive_field(idx, type);
580 break;
581 case T_OBJECT:
582 case T_ARRAY:
583 if (group != _static_fields) _nonstatic_oopmap_count++;
584 group->add_oop_field(idx);
585 break;
586 default:
587 fatal("Something wrong?");
588 }
589 }
590 _root_group->sort_by_size();
591 _static_fields->sort_by_size();
592 if (!_contended_groups.is_empty()) {
593 for (int i = 0; i < _contended_groups.length(); i++) {
594 _contended_groups.at(i)->sort_by_size();
595 }
596 }
597 }
598
599 LayoutRawBlock* FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
600 LayoutRawBlock* padding = nullptr;
601 if (ContendedPaddingWidth > 0) {
602 padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
603 _layout->insert(slot, padding);
604 }
605 return padding;
606 }
607
608 // Computation of regular classes layout is an evolution of the previous default layout
609 // (FieldAllocationStyle 1):
610 // - primitive fields are allocated first (from the biggest to the smallest)
611 // - oop fields are allocated, either in existing gaps or at the end of
612 // the layout. We allocate oops in a single block to have a single oop map entry.
613 // - if the super class ended with an oop, we lead with oops. That will cause the
614 // trailing oop map entry of the super class and the oop map entry of this class
615 // to be folded into a single entry later. Correspondingly, if the super class
616 // ends with a primitive field, we gain nothing by leading with oops; therefore
617 // we let oop fields trail, thus giving future derived classes the chance to apply
618 // the same trick.
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();

  if (_is_contended) {
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    _layout->set_start(_layout->last_block());
    LayoutRawBlock* padding = insert_contended_padding(_layout->start());
    if (padding != nullptr) {
      // Setting the padding block as start ensures we do not insert past it.
      _layout->set_start(padding);
    }
    need_tail_padding = true;
  }

  // Allocate oops first when the super's layout ends with an oop, so the
  // super's trailing oop map entry and this class's entry can be merged
  // (see the comment block above this function).
  if (_super_ends_with_oop) {
    _layout->add(_root_group->oop_fields());
    _layout->add(_root_group->primitive_fields());
  } else {
    _layout->add(_root_group->primitive_fields());
    _layout->add(_root_group->oop_fields());
  }

  // Each contended group is surrounded by padding: one block before the
  // group's fields, and (via need_tail_padding) one block after the last.
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      LayoutRawBlock* padding = insert_contended_padding(start);

      // Do not insert fields past the padding block.
      if (padding != nullptr) {
        start = padding;
      }

      _layout->add(cg->primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Static oops are laid out contiguously so the mirror has a single oop range.
  _static_layout->add_contiguously(this->_static_fields->oop_fields());
  _static_layout->add(this->_static_fields->primitive_fields());

  epilogue();
}
670
// Finalizes the layout computation: builds the non-static oop maps (inherited
// entries first, then this class's oops), computes the instance and static
// sizes, and fills the FieldLayoutInfo passed back to InstanceKlass creation.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;

  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(max_oop_map_count);
  if (super_oop_map_count > 0) {
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
                                                    _super_klass->nonstatic_oop_map_count());
  }

  // Root-group oops are added one by one (they may be scattered in the layout).
  if (_root_group->oop_fields() != nullptr) {
    for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
      LayoutRawBlock* b = _root_group->oop_fields()->at(i);
      nonstatic_oop_maps->add(b->offset(), 1);
    }
  }

  // Contended-group oops are contiguous, so each group contributes a single
  // oop map entry starting at its first oop field.
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count());
      }
    }
  }

  nonstatic_oop_maps->compact();

  // The trailing EMPTY block's offset is the end of the allocated fields.
  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
                            InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;

  if (PrintFieldLayout) {
    ResourceMark rm;
    tty->print_cr("Layout of class %s", _classname->as_C_string());
    tty->print_cr("Instance fields:");
    _layout->print(tty, false, _super_klass);
    tty->print_cr("Static fields:");
    _static_layout->print(tty, true, nullptr);
    tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    tty->print_cr("---");
  }
}
727
// Entry point of the builder: only the regular-class layout strategy is
// implemented in this version.
void FieldLayoutBuilder::build_layout() {
  compute_regular_layout();
}
|
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classFileParser.hpp"
26 #include "classfile/fieldLayoutBuilder.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "jvm.h"
30 #include "memory/resourceArea.hpp"
31 #include "oops/array.hpp"
32 #include "oops/fieldStreams.inline.hpp"
33 #include "oops/inlineKlass.inline.hpp"
34 #include "oops/instanceKlass.inline.hpp"
35 #include "oops/instanceMirrorKlass.hpp"
36 #include "oops/klass.inline.hpp"
37 #include "runtime/fieldDescriptor.inline.hpp"
38 #include "utilities/align.hpp"
39 #include "utilities/powerOfTwo.hpp"
40
// Selects the layout kind for a field: a plain REFERENCE, or one of the flat
// (inlined) layouts offered by the field's InlineKlass. Flattening is only
// attempted for non-static, non-volatile, non-injected fields whose declared
// type is a known value class.
static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
                                         bool can_use_atomic_flat) {

  // The can_use_atomic_flat argument indicates if an atomic flat layout can be used for this field.
  // This argument will be false if the container is a loosely consistent value class. Using an atomic layout
  // in a container that has no atomicity guarantee creates a risk to see this field's value be subject to
  // tearing even if the field's class was declared atomic (non loosely consistent).

  if (!UseFieldFlattening) {
    return LayoutKind::REFERENCE;
  }

  if (field_info.field_flags().is_injected()) {
    // don't flatten injected fields
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_volatile()) {
    // volatile is used as a keyword to prevent flattening
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_static()) {
    assert(inline_layout_info_array == nullptr ||
           inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr,
           "Static fields do not have inline layout info");
    // don't flatten static fields
    return LayoutKind::REFERENCE;
  }

  if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
    // field's type is not a known value class, using a reference
    return LayoutKind::REFERENCE;
  }

  InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
  InlineKlass* vk = inline_field_info->klass();

  if (field_info.field_flags().is_null_free_inline_type()) {
    assert(field_info.access_flags().is_strict(), "null-free fields must be strict");
    if (vk->must_be_atomic()) {
      // A naturally atomic value needs no atomic layout: its non-atomic layout is tear-free.
      if (vk->is_naturally_atomic(true /* null-free */) && vk->has_null_free_non_atomic_layout()) return LayoutKind::NULL_FREE_NON_ATOMIC_FLAT;
      return (vk->has_null_free_atomic_layout() && can_use_atomic_flat) ? LayoutKind::NULL_FREE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return vk->has_null_free_non_atomic_layout() ? LayoutKind::NULL_FREE_NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
    }
  } else {
    // To preserve the consistency between the null-marker and the field content, the NULLABLE_NON_ATOMIC_FLAT
    // can only be used in containers that have atomicity guarantees (can_use_atomic_flat argument set to true)
    if (field_info.access_flags().is_strict() && field_info.access_flags().is_final() && can_use_atomic_flat) {
      if (vk->has_nullable_non_atomic_layout()) return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    // Another special case where NULLABLE_NON_ATOMIC_FLAT can be used: nullable empty values, because the
    // payload of those values contains only the null-marker
    if (vk->is_empty_inline_type() && vk->has_nullable_non_atomic_layout()) {
      return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    if (UseNullableAtomicValueFlattening && vk->has_nullable_atomic_layout()) {
      return can_use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return LayoutKind::REFERENCE;
    }
  }
}
105
106 static bool field_is_inlineable(FieldInfo fieldinfo, LayoutKind lk, Array<InlineLayoutInfo>* ili) {
107 if (fieldinfo.field_flags().is_null_free_inline_type()) {
108 // A null-free inline type is always inlineable
109 return true;
110 }
111
112 if (lk != LayoutKind::REFERENCE) {
113 assert(lk != LayoutKind::BUFFERED, "Sanity check");
114 assert(lk != LayoutKind::UNKNOWN, "Sanity check");
115 // We've chosen a layout that isn't a normal reference
116 return true;
117 }
118
119 const int field_index = (int)fieldinfo.index();
120 if (!fieldinfo.field_flags().is_injected() &&
121 ili != nullptr &&
122 ili->adr_at(field_index)->klass() != nullptr &&
123 !ili->adr_at(field_index)->klass()->is_identity_class() &&
124 !ili->adr_at(field_index)->klass()->is_abstract()) {
125 // The field's klass is not an identity class or abstract
126 return true;
127 }
128
129 return false;
130 }
131
// Constructor for blocks that do not represent a declared field:
// EMPTY, RESERVED, PADDING, INHERITED and NULL_MARKER blocks.
// The offset is assigned later, when the block is inserted into a layout.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN),
  _offset(-1),          // -1 means "not placed yet"
  _alignment(1),        // no alignment constraint for non-field blocks
  _size(size),
  _field_index(-1) {    // no FieldInfo entry for non-field blocks
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
146
147
// Constructor for blocks that represent a field of the class being laid out:
// REGULAR (primitive or oop), FLAT and INHERITED blocks. 'index' is the
// field's entry in the FieldInfo array; the offset is assigned at insertion.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),            // set later for FLAT blocks via set_inline_klass()
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN), // set later for FLAT blocks via set_layout_kind()
  _offset(-1),                       // -1 means "not placed yet"
  _alignment(alignment),
  _size(size),
  _field_index(index) {
  assert(kind == REGULAR || kind == FLAT || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
163
164 bool LayoutRawBlock::fit(int size, int alignment) {
165 int adjustment = 0;
166 if ((_offset % alignment) != 0) {
167 adjustment = alignment - (_offset % alignment);
168 }
169 return _size >= size + adjustment;
170 }
171
// A FieldGroup collects the fields that must be laid out together:
// the default group, the static group, or one @Contended group.
// All field lists are lazily allocated on first use.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _small_primitive_fields(nullptr),
  _big_primitive_fields(nullptr),
  _oop_fields(nullptr),
  _contended_group(contended_group) {} // -1 means no contended group, 0 means default contended group
178
179 void FieldGroup::add_primitive_field(int idx, BasicType type) {
180 int size = type2aelembytes(type);
181 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
182 if (size >= heapOopSize) {
183 add_to_big_primitive_list(block);
184 } else {
185 add_to_small_primitive_list(block);
186 }
187 }
188
189 void FieldGroup::add_oop_field(int idx) {
190 int size = type2aelembytes(T_OBJECT);
191 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
192 if (_oop_fields == nullptr) {
193 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
194 }
195 _oop_fields->append(block);
196 }
197
198 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk) {
199 const int size = vk->layout_size_in_bytes(lk);
200 const int alignment = vk->layout_alignment(lk);
201
202 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
203 block->set_inline_klass(vk);
204 block->set_layout_kind(lk);
205 if (block->size() >= heapOopSize) {
206 add_to_big_primitive_list(block);
207 } else {
208 assert(!vk->contains_oops(), "Size of Inline klass with oops should be >= heapOopSize");
209 add_to_small_primitive_list(block);
210 }
211 }
212
213 void FieldGroup::sort_by_size() {
214 if (_small_primitive_fields != nullptr) {
215 _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
216 }
217 if (_big_primitive_fields != nullptr) {
218 _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
219 }
220 }
221
222 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
223 if (_small_primitive_fields == nullptr) {
224 _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
225 }
226 _small_primitive_fields->append(block);
227 }
228
229 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
230 if (_big_primitive_fields == nullptr) {
231 _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
232 }
233 _big_primitive_fields->append(block);
234 }
235
// A FieldLayout is a doubly-linked list of LayoutRawBlocks describing one
// layout (instance or static). The list stays empty until one of the
// initialize_*_layout() methods is called.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
  _field_info(field_info),
  _inline_layout_info_array(inline_layout_info_array),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),   // nullptr here; set for real by initialize_*_layout()
  _last(_blocks),    // nullptr here; set for real by initialize_*_layout()
  _super_first_field_offset(-1),
  _super_alignment(-1),
  _super_min_align_required(-1),
  _null_reset_value_offset(-1),
  _acmp_maps_offset(-1),
  _super_has_nonstatic_fields(false),
  _has_inherited_fields(false) {}
250
// Creates the initial layout for static fields: one unbounded EMPTY block,
// with the beginning of the java.lang.Class mirror reserved when its size
// is already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
265
// Creates the initial layout for instance fields. With no super class, the
// layout is a single unbounded EMPTY block with the object header reserved.
// Otherwise the super class' layout is reconstructed and its holes filled.
// The out-parameter reports whether the super layout ends with an oop
// (information the caller uses when computing oop maps).
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
  if (super_klass == nullptr) {
    super_ends_with_oop = false;
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    // Reserve the object header at the beginning of the layout
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    reconstruct_layout(super_klass, _super_has_nonstatic_fields, super_ends_with_oop);
    fill_holes(super_klass);
    // Contended super classes must not have their padding reused by subclasses,
    // so new fields are only appended after the inherited layout in that case.
    if ((!super_klass->has_contended_annotations()) || !_super_has_nonstatic_fields) {
      _start = _blocks; // start allocating fields from the first empty block
    } else {
      _start = _last; // append fields at the end of the reconstructed layout
    }
  }
}
284
285 LayoutRawBlock* FieldLayout::first_field_block() {
286 LayoutRawBlock* block = _blocks;
287 while (block != nullptr
288 && block->block_kind() != LayoutRawBlock::INHERITED
289 && block->block_kind() != LayoutRawBlock::REGULAR
290 && block->block_kind() != LayoutRawBlock::FLAT
291 && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
292 block = block->next_block();
293 }
294 return block;
295 }
296
// Insert a set of fields into a layout.
// For each field, search for an empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Memoization of the previous field's search: if two consecutive fields have
  // the same size and alignment, a failed search for the first implies a
  // failed search for the second.
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      // Walk backward from the block preceding the trailing block, keeping the
      // smallest fitting EMPTY slot found (best-fit strategy)
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;

      assert(start->block_kind() != LayoutRawBlock::EMPTY, "");
      while (cursor != start) {
        if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No fitting empty slot: append at the end (the trailing EMPTY block)
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
350
// Used for classes with hard coded field offsets, insert a field at the specified offset.
// Walks the layout from "start" (or from _start when null) until it finds the
// EMPTY slot covering the requested offset, splits off any leading space, and
// places the block there. Fails fatally if no suitable slot exists.
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    // A matching slot either spans the requested offset, or is the trailing
    // (unbounded) block of the layout
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Split off the space preceding the requested offset as an EMPTY block
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        // The slot was entirely consumed by the field
        remove(slot);
      }
      // Record the final offset in the FieldInfo entry for real fields
      if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
        _field_info->adr_at(block->field_index())->set_offset(block->offset());
      }
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
382
383 // The allocation logic uses a best fit strategy: the set of fields is allocated
384 // in the first empty slot big enough to contain the whole set ((including padding
385 // to fit alignment constraints).
386 void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
387 if (list == nullptr) return;
388 if (start == nullptr) {
389 start = _start;
390 }
391 // This code assumes that if the first block is well aligned, the following
392 // blocks would naturally be well aligned (no need for adjustment)
393 int size = 0;
394 for (int i = 0; i < list->length(); i++) {
395 size += list->at(i)->size();
410 }
411 assert(candidate != nullptr, "Candidate must not be null");
412 assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
413 assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
414 }
415
416 for (int i = 0; i < list->length(); i++) {
417 LayoutRawBlock* b = list->at(i);
418 insert_field_block(candidate, b);
419 assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
420 }
421 }
422
// Inserts a field block into the given EMPTY slot, adding an EMPTY padding
// block first if the slot's offset does not satisfy the field's alignment.
// Records the resulting offset in the FieldInfo array, tracks the offsets of
// the special nullResetValue and acmpMaps fields, and, for nullable flat
// fields, computes and records the null marker's offset in the container.
LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  if (slot->offset() % block->alignment() != 0) {
    // Pad so the field lands on a correctly aligned offset
    int adjustment = block->alignment() - (slot->offset() % block->alignment());
    LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
    insert(slot, adj);
  }
  assert(slot->size() >= block->size(), "Enough space must remain after adjustment");
  insert(slot, block);
  if (slot->size() == 0) {
    // The slot was entirely consumed by the field
    remove(slot);
  }
  // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
  if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
    _field_info->adr_at(block->field_index())->set_offset(block->offset());
    if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
      _null_reset_value_offset = block->offset();
    }
    if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::acmp_maps_name()) {
      _acmp_maps_offset = block->offset();
    }
  }
  if (LayoutKindHelper::is_nullable_flat(block->layout_kind())) {
    // The null marker lives inside the flat field's payload: convert its
    // payload-relative position into an offset within this container
    int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->payload_offset() + block->offset();
    _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
    _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
  }

  return block;
}
453
// Rebuilds the layout of all non-static fields inherited from the superclass
// chain, creating INHERITED blocks at the offsets recorded in each class.
// Out-parameters report whether any non-static field was inherited and
// whether the field with the highest offset is an oop. Also records the
// lowest inherited field offset and the minimal alignment required by
// inherited fields (used for flat layout computations).
void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_nonstatic_fields, bool& ends_with_oop) {
  has_nonstatic_fields = ends_with_oop = false;
  if (ik->is_abstract() && !ik->is_identity_class()) {
    // NOTE(review): abstract value classes impose a T_LONG alignment here —
    // presumably a conservative bound for subclasses; verify against the
    // payload alignment rules
    _super_alignment = type2aelembytes(BasicType::T_LONG);
  }
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  BasicType last_type;  // only read below when last_offset > 0, i.e. after at least one field was seen
  int last_offset = -1;
  while (ik != nullptr) {
    for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // distinction between static and non-static fields is missing
      if (fs.access_flags().is_static()) continue;
      has_nonstatic_fields = true;
      _has_inherited_fields = true;
      // Track the lowest offset of any inherited field
      if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) {
        _super_first_field_offset = fs.offset();
      }
      LayoutRawBlock* block;
      if (fs.is_flat()) {
        InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
        InlineKlass* vk = layout_info.klass();
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
                                   vk->layout_size_in_bytes(layout_info.kind()),
                                   vk->layout_alignment(layout_info.kind()));
        assert(_super_alignment == -1 || _super_alignment >= vk->payload_alignment(), "Invalid value alignment");
        _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
      } else {
        int size = type2aelembytes(type);
        // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
        // For primitive types, the alignment is equal to the size
        assert(_super_alignment == -1 || _super_alignment >= size, "Invalid value alignment");
        _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
      }
      // Remember the last (highest-offset) field to detect a trailing oop
      if (fs.offset() > last_offset) {
        last_offset = fs.offset();
        last_type = type;
      }
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    ik = ik->super() == nullptr ? nullptr : ik->super();
  }
  assert(last_offset == -1 || last_offset > 0, "Sanity");
  if (last_offset > 0 &&
      (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
    ends_with_oop = true;
  }

  // Chain all inherited blocks in offset order, preceded by the object header
  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;
  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
}
516
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    // A gap between the end of one block and the start of the next is a hole
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      int size = b->next_block()->offset() - (b->offset() + b->size());
      // FIXME it would be better if initial empty block where tagged as PADDING for value classes
      // Tracked by JDK-8383383
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Terminate the layout with an unbounded EMPTY block where new fields can go
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
562
// Inserts 'block' immediately before the EMPTY 'slot', at the slot's current
// offset. The slot is shrunk by the block's size and its offset advanced
// accordingly. The caller must guarantee that the slot's offset satisfies the
// block's alignment and that the slot is big enough. Updates the list head
// (_blocks) and the insertion start point (_start) when they pointed to slot.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Link block into the doubly-linked list, just before slot
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  if (_blocks == slot) {
    _blocks = block;
  }
  if (_start == slot) {
    _start = block;
  }
  return block;
}
585
// Unlinks 'block' from the layout's doubly-linked list. The trailing EMPTY
// block (_last) must never be removed. The block object itself is not freed
// (presumably resource-area allocated — callers may still read it after
// removal; see remove_null_marker()).
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    // Removing the head: the next block becomes the new head
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  // Keep _start valid if it pointed at the removed block
  if (block == _start) {
    _start = block->prev_block();
  }
}
603
// Shifts every field block of the layout up by 'shift' bytes. The space
// created before the first field is absorbed by the preceding EMPTY block,
// or covered by a new PADDING block when the preceding block is not EMPTY.
// All recorded field offsets and null-marker offsets are updated to match.
void FieldLayout::shift_fields(int shift) {
  LayoutRawBlock* b = first_field_block();
  assert(b != nullptr, "shift_fields must not be called if layout has no fields");
  LayoutRawBlock* previous = b->prev_block();
  if (previous->block_kind() == LayoutRawBlock::EMPTY) {
    // Grow the preceding empty block to absorb the shift
    previous->set_size(previous->size() + shift);
  } else {
    // Insert a PADDING block covering the gap created by the shift
    LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
    nb->set_offset(b->offset());
    previous->set_next_block(nb);
    nb->set_prev_block(previous);
    b->set_prev_block(nb);
    nb->set_next_block(b);
  }
  // Shift all subsequent blocks and update the recorded offsets
  while (b != nullptr) {
    b->set_offset(b->offset() + shift);
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      _field_info->adr_at(b->field_index())->set_offset(b->offset());
      if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
        // Null markers move with their enclosing flat field
        int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
        _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
        _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
      }
    }
    assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
    b = b->next_block();
  }
}
632
633 LayoutRawBlock* FieldLayout::find_null_marker() {
634 LayoutRawBlock* b = _blocks;
635 while (b != nullptr) {
636 if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
637 return b;
638 }
639 b = b->next_block();
640 }
641 ShouldNotReachHere();
642 return nullptr;
643 }
644
// Removes the NULL_MARKER block from the layout, turning its space back into
// empty space: the marker is coalesced into the following EMPTY block when
// possible, otherwise it is simply re-tagged as EMPTY.
void FieldLayout::remove_null_marker() {
  LayoutRawBlock* b = first_field_block();
  while (b != nullptr) {
    if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
      if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
        // Coalesce with the following empty block. Note that b remains
        // readable after remove(): remove() only unlinks the block, it does
        // not free it.
        LayoutRawBlock* n = b->next_block();
        remove(b);
        n->set_offset(b->offset());
        n->set_size(n->size() + b->size());
      } else {
        b->set_block_kind(LayoutRawBlock::EMPTY);
      }
      return;
    }
    b = b->next_block();
  }
  ShouldNotReachHere(); // if we reach this point, the null marker was not found!
}
663
// Prints a human-readable dump of the layout, one line per block, in the
// format " @offset KIND size/alignment ..." with per-kind details.
// 'super' is required to resolve the names of INHERITED fields;
// 'inline_fields' is required to describe FLAT fields.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields, bool dummy_field_is_reused_as_null_marker) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->block_kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print(" @%d %s %d/%d \"%s\" %s",
                      b->offset(),
                      "REGULAR",
                      b->size(),
                      b->alignment(),
                      fi->name(_cp)->as_C_string(),
                      fi->signature(_cp)->as_C_string());

        if (dummy_field_is_reused_as_null_marker) {
          // Annotate the dummy (empty-marker) field when it doubles as the null marker
          const bool is_dummy_field = fi->name(_cp)->fast_compare(vmSymbols::symbol_at(VM_SYMBOL_ENUM_NAME(empty_marker_name))) == 0;
          if (is_dummy_field) {
            output->print(" (reused as null-marker)");
          }
        }

        output->cr();
        break;
      }
      case LayoutRawBlock::FLAT: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
        assert(ik != nullptr, "");
        output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
                         b->offset(),
                         "FLAT",
                         b->size(),
                         b->alignment(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         ik->name()->as_C_string(),
                         ik->class_loader_data(),
                         LayoutKindHelper::layout_kind_as_string(b->layout_kind()));
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %s %d/-",
                         b->offset(),
                         "RESERVED",
                         b->size());
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Walk the superclass chain to find the field declared at this offset
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
              output->print_cr(" @%d %s %d/%d \"%s\" %s",
                               b->offset(),
                               "INHERITED",
                               b->size(),
                               b->alignment(),
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string());
              found = true;
              break;
            }
          }
          ik = ik->super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "EMPTY",
                         b->size());
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "PADDING",
                         b->size());
        break;
      case LayoutRawBlock::NULL_MARKER:
      {
        output->print_cr(" @%d %s %d/1 ",
                         b->offset(),
                         "NULL_MARKER",
                         b->size());
        break;
      }
      default:
        fatal("Unknown block type");
    }
    b = b->next_block();
  }
}
761
// Gathers everything needed to compute the instance and static field layouts
// of a class being loaded. Offsets and sizes start at the -1 sentinel and are
// filled in as the layout is computed; results are published through 'info'
// and the FieldInfo / InlineLayoutInfo arrays.
// NOTE(review): _super_ends_with_oop is not in this initializer list — it is
// assigned as an out-parameter in prologue(); verify it cannot be read before.
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type,bool is_abstract_value,
                                       bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
  _classname(classname),
  _loader_data(loader_data),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _inline_layout_info_array(inline_layout_info_array),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _payload_alignment(-1),
  _payload_offset(-1),
  _null_marker_offset(-1),
  _payload_size_in_bytes(-1),
  _null_free_non_atomic_layout_size_in_bytes(-1),
  _null_free_non_atomic_layout_alignment(-1),
  _null_free_atomic_layout_size_in_bytes(-1),
  _nullable_atomic_layout_size_in_bytes(-1),
  _nullable_non_atomic_layout_size_in_bytes(-1),
  _fields_size_sum(0),
  _declared_nonstatic_fields_count(0),
  _has_non_naturally_atomic_fields(false),
  _is_naturally_atomic(false),
  _must_be_atomic(must_be_atomic),
  _has_nonstatic_fields(false),
  _has_inlineable_fields(false),
  _has_inlined_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _is_abstract_value(is_abstract_value),
  _is_empty_inline_class(false) {}
799
800 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
801 assert(g > 0, "must only be called for named contended groups");
802 FieldGroup* fg = nullptr;
803 for (int i = 0; i < _contended_groups.length(); i++) {
804 fg = _contended_groups.at(i);
805 if (fg->contended_group() == g) return fg;
806 }
807 fg = new FieldGroup(g);
808 _contended_groups.append(fg);
809 return fg;
810 }
811
// Creates the instance and static layouts and the default field groups.
// The instance layout is seeded with the super class' reconstructed layout
// (or the bare object header when there is no super class).
void FieldLayoutBuilder::prologue() {
  _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
  const InstanceKlass* super_klass = _super_klass;
  _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
  // Start from the super class' oop map count; flat fields add more later
  _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
  if (super_klass != nullptr) {
    _has_nonstatic_fields = super_klass->has_nonstatic_fields();
  }
  _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
  _static_layout->initialize_static_layout();
  _static_fields = new FieldGroup();
  _root_group = new FieldGroup();
}
825
// Field sorting for regular (non-inline) classes:
// - fields are sorted in static and non-static fields
// - non-static fields are also sorted according to their contention group
//   (support of the @Contended annotation)
// - @Contended annotation is ignored for static fields
// - field flattening decisions are taken in this method
void FieldLayoutBuilder::regular_field_sorting() {
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      if (fieldinfo.field_flags().is_contended()) {
        int g = fieldinfo.contended_group();
        if (g == 0) {
          // Unnamed contended group: the field gets a group of its own.
          // NOTE(review): 'true' converts to contended group id 1 here —
          // presumably deliberate, but verify it cannot clash with a named group
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        group->add_primitive_field(idx, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        // Decide whether the field is laid out as a reference or flattened
        // (can_use_atomic_flat is true for non-inline containers)
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);

        if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
          _has_inlineable_fields = true;
        }

        if (lk == LayoutKind::REFERENCE) {
          if (group != _static_fields) _nonstatic_oopmap_count++;
          group->add_oop_field(idx);
        } else {
          assert(group != _static_fields, "Static fields are not flattened");
          assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
                 "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

          const int field_index = (int)fieldinfo.index();
          assert(_inline_layout_info_array != nullptr, "Array must have been created");
          assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
          _has_inlined_fields = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
          group->add_flat_field(idx, vk, lk);
          // Record the chosen layout kind and mark the field as flat
          _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
          // no need to update _must_be_atomic if vk->must_be_atomic() is true because current class is not an inline class
        }
        break;
      }
      default:
        fatal("Something wrong?");
    }
  }
  // Sort each group so bigger fields are placed first (less padding)
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}
909
910 /* Field sorting for inline classes:
911 * - because inline classes are immutable, the @Contended annotation is ignored
912 * when computing their layout (with only read operation, there's no false
913 * sharing issue)
914 * - this method also records the alignment of the field with the most
915 * constraining alignment, this value is then used as the alignment
916 * constraint when flattening this inline type into another container
917 * - field flattening decisions are taken in this method (those decisions are
918 * currently only based in the size of the fields to be flattened, the size
919 * of the resulting instance is not considered)
920 */
void FieldLayoutBuilder::inline_class_field_sorting() {
  assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
  // Tracks the most constraining alignment among the declared non-static fields;
  // -1 means "no non-static field seen yet".
  int alignment = -1;
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    int field_alignment = 1;
    // Dispatch the field to the static group or the single root group
    // (the @Contended annotation is ignored for inline classes, see comment above).
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      _declared_nonstatic_fields_count++;
      group = _root_group;
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        if (group != _static_fields) {
          field_alignment = type2aelembytes(type); // alignment == size for primitive types
        }
        group->add_primitive_field(idx, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);

        if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
          _has_inlineable_fields = true;
        }

        if (lk == LayoutKind::REFERENCE) {
          // Field stays a reference (either not inlineable or flattening was rejected)
          if (group != _static_fields) {
            _nonstatic_oopmap_count++;
            field_alignment = type2aelembytes(type); // alignment == size for oops
          }
          group->add_oop_field(idx);
        } else {
          // Field is flattened: record the flat layout kind and propagate the
          // embedded inline klass' oop maps and alignment constraint.
          assert(group != _static_fields, "Static fields are not flattened");
          assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
                 "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

          const int field_index = (int)fieldinfo.index();
          assert(_inline_layout_info_array != nullptr, "Array must have been created");
          assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
          _has_inlined_fields = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
          if (!vk->is_naturally_atomic(LayoutKindHelper::is_null_free_flat(lk))) _has_non_naturally_atomic_fields = true;
          group->add_flat_field(idx, vk, lk);
          _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          field_alignment = vk->layout_alignment(lk);
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
        }
        break;
      }
      default:
        fatal("Unexpected BasicType");
    }
    // Only non-static fields contribute to the payload alignment
    if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
  }
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  // Published for use as the alignment constraint when this inline type is
  // flattened into another container (see comment above this method).
  _payload_alignment = alignment;
  assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
}
998
999 LayoutRawBlock* FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
1000 LayoutRawBlock* padding = nullptr;
1001 if (ContendedPaddingWidth > 0) {
1002 padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
1003 _layout->insert(slot, padding);
1004 }
1005 return padding;
1006 }
1007
1008 // Computation of regular classes layout is an evolution of the previous default layout
1009 // (FieldAllocationStyle 1):
1010 // - primitive fields (both primitive types and flat inline types) are allocated
1011 // first (from the biggest to the smallest)
1012 // - oop fields are allocated, either in existing gaps or at the end of
1013 // the layout. We allocate oops in a single block to have a single oop map entry.
1014 // - if the super class ended with an oop, we lead with oops. That will cause the
1015 // trailing oop map entry of the super class and the oop map entry of this class
1016 // to be folded into a single entry later. Correspondingly, if the super class
1017 // ends with a primitive field, we gain nothing by leading with oops; therefore
1018 // we let oop fields trail, thus giving future derived classes the chance to apply
1019 // the same trick.
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();
  if (_is_contended) {
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    _layout->set_start(_layout->last_block());
    LayoutRawBlock* padding = insert_contended_padding(_layout->start());
    if (padding != nullptr) {
      // Setting the padding block as start ensures we do not insert past it.
      _layout->set_start(padding);
    }
    need_tail_padding = true;
  }

  if (_super_ends_with_oop) {
    // Lead with oops so this class' oop map entry can later be folded with the
    // super class' trailing entry (see the comment above this method).
    _layout->add(_root_group->oop_fields());
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
  } else {
    // Let oops trail so future subclasses get the chance to apply the same trick.
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
    _layout->add(_root_group->oop_fields());
  }

  if (!_contended_groups.is_empty()) {
    // Each @Contended group is laid out behind its own padding block.
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      LayoutRawBlock* padding = insert_contended_padding(start);

      // Do not insert fields past the padding block.
      if (padding != nullptr) {
        start = padding;
      }

      _layout->add(cg->big_primitive_fields(), start);
      _layout->add(cg->small_primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  epilogue();
}
1075
1076 /* Computation of inline classes has a slightly different strategy than for
1077 * regular classes. Regular classes have their oop fields allocated at the end
 * of the layout to improve GC performance. Unfortunately, this strategy
1079 * increases the number of empty slots inside an instance. Because the purpose
1080 * of inline classes is to be embedded into other containers, it is critical
1081 * to keep their size as small as possible. For this reason, the allocation
1082 * strategy is:
1083 * - big primitive fields (primitive types and flat inline types larger
1084 * than an oop) are allocated first (from the biggest to the smallest)
1085 * - then oop fields
1086 * - then small primitive fields (from the biggest to the smallest)
1087 */
void FieldLayoutBuilder::compute_inline_class_layout() {

  // Test if the concrete inline class is an empty class (no instance fields)
  // and insert a dummy field if needed
  if (!_is_abstract_value) {
    bool declares_nonstatic_fields = false;
    for (FieldInfo fieldinfo : *_field_info) {
      if (!fieldinfo.access_flags().is_static()) {
        declares_nonstatic_fields = true;
        break;
      }
    }

    if (!declares_nonstatic_fields) {
      bool has_inherited_fields = _super_klass != nullptr && _super_klass->has_nonstatic_fields();
      if (!has_inherited_fields) {
        // Inject ".empty" dummy field so the payload is never zero-sized
        _is_empty_inline_class = true;
        FieldInfo::FieldFlags fflags(0);
        fflags.update_injected(true);
        AccessFlags aflags;
        FieldInfo fi(aflags,
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
                     0,
                     fflags);
        int idx = _field_info->append(fi);
        _field_info->adr_at(idx)->set_index(idx);
      }
    }
  }

  prologue();
  inline_class_field_sorting();

  assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");

  if (!_layout->super_has_nonstatic_fields()) {
    // No inherited fields, the layout must be empty except for the RESERVED block
    // PADDING is inserted if needed to ensure the correct alignment of the payload.
    if (_is_abstract_value && _has_nonstatic_fields) {
      // non-static fields of the abstract class must be laid out without knowing
      // the alignment constraints of the fields of the sub-classes, so the worst
      // case scenario is assumed, which is currently the alignment of T_LONG.
      // PADDING is added if needed to ensure the payload will respect this alignment.
      _payload_alignment = type2aelembytes(BasicType::T_LONG);
    }
    assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY, "Unexpected");
    LayoutRawBlock* first_empty = _layout->start()->next_block();
    if (first_empty->offset() % _payload_alignment != 0) {
      // Pad up to the next _payload_alignment boundary, dropping the EMPTY
      // block if the padding consumed it entirely.
      LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
      _layout->insert(first_empty, padding);
      if (first_empty->size() == 0) {
        _layout->remove(first_empty);
      }
      _layout->set_start(padding);
    }
  } else { // the class has inherited some fields from its super(s)
    if (!_is_abstract_value) {
      // This is the step where the layout of the final concrete value class' layout
      // is computed. Super abstract value classes might have been too conservative
      // regarding alignment constraints, but now that the full set of non-static fields is
      // known, compute which alignment to use, then set first allowed field offset

      assert(_has_nonstatic_fields, "Concrete value classes must have at least one field");
      if (_payload_alignment == -1) { // current class declares no local nonstatic fields
        _payload_alignment = _layout->super_min_align_required();
      }

      assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
      assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");

      if (_payload_alignment < _layout->super_alignment()) {
        int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
        assert(new_alignment % _payload_alignment == 0, "Must be");
        assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
        _payload_alignment = new_alignment;
      }
      _layout->set_start(_layout->first_field_block());
    } else {
      // Abstract value class inheriting fields, restore the pessimistic alignment
      // constraint (see comment above) and ensure no field will be inserted before
      // the first inherited field.
      _payload_alignment = type2aelembytes(BasicType::T_LONG);
      _layout->set_start(_layout->first_field_block());
    }
  }

  // Inline-class allocation order: big primitives, then oops, then small
  // primitives (see the strategy comment above this method).
  _layout->add(_root_group->big_primitive_fields());
  _layout->add(_root_group->oop_fields());
  _layout->add(_root_group->small_primitive_fields());

  LayoutRawBlock* first_field = _layout->first_field_block();
  if (first_field != nullptr) {
    _payload_offset = _layout->first_field_block()->offset();
    _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
  } else {
    // A concrete value class always has a field (possibly the injected dummy),
    // so only abstract value classes can reach this branch.
    assert(_is_abstract_value, "Concrete inline types must have at least one field");
    _payload_offset = _layout->blocks()->size();
    _payload_size_in_bytes = 0;
  }

  // Determining if the value class is naturally atomic:
  if (_declared_nonstatic_fields_count == 0) {
    _is_naturally_atomic = _super_klass == vmClasses::Object_klass() || _super_klass->is_naturally_atomic(true /* null-free */);
  } else if (_declared_nonstatic_fields_count == 1) {
    _is_naturally_atomic = !_layout->super_has_nonstatic_fields() && !_has_non_naturally_atomic_fields;
  } else {
    _is_naturally_atomic = false;
  }

  // At this point, the characteristics of the raw layout (used in standalone instances) are known.
  // From this, additional layouts will be computed: atomic and nullable layouts
  // Once those additional layouts are computed, the raw layout might need some adjustments

  bool vm_uses_flattening = UseFieldFlattening || UseArrayFlattening;

  if (!_is_abstract_value && vm_uses_flattening) { // Flat layouts are only for concrete value classes
    // Validation of the non atomic layout
    if (UseNullFreeNonAtomicValueFlattening && (!_must_be_atomic || _is_naturally_atomic)) {
      _null_free_non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
      _null_free_non_atomic_layout_alignment = _payload_alignment;
    }

    // Next step is to compute the characteristics for a layout enabling atomic updates
    if (UseNullFreeAtomicValueFlattening) {
      // Atomic layouts are rounded up to a power of two so a single atomic
      // memory operation can cover the whole payload.
      int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
      if (atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
        _null_free_atomic_layout_size_in_bytes = atomic_size;
      }
    }

    // Next step is the nullable layouts: they must include a null marker
    // Note about the special case of j.l.Double and j.l.Long: the introduction of
    // the NULLABLE_NON_ATOMIC_FLAT layout caused an increase of the size of their
    // instances which causes performance regression (see JDK-8379145).
    // The temporary solution is to simply disable nullable layouts for these classes
    // until a better fix is implemented (see JDK-8382361).
    if ((UseNullableAtomicValueFlattening || UseNullableNonAtomicValueFlattening)
        && _classname != vmSymbols::java_lang_Double() && _classname != vmSymbols::java_lang_Long()) {
      // Looking if there's an empty slot inside the layout that could be used to store a null marker
      LayoutRawBlock* b = _layout->first_field_block();
      assert(b != nullptr, "A concrete value class must have at least one (possible dummy) field");
      int null_marker_offset = -1;
      if (_is_empty_inline_class) {
        // Reusing the dummy field as a field marker
        assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
        null_marker_offset = b->offset();
      } else {
        // Scan the layout for the first EMPTY block
        while (b != _layout->last_block()) {
          if (b->block_kind() == LayoutRawBlock::EMPTY) {
            break;
          }
          b = b->next_block();
        }
        if (b != _layout->last_block()) {
          // found an empty slot, register its offset from the beginning of the payload
          null_marker_offset = b->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->add_field_at_offset(marker, b->offset());
        }
        if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
          int last_offset = _layout->last_block()->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->insert_field_block(_layout->last_block(), marker);
          assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
          null_marker_offset = marker->offset();
        }
      }
      assert(null_marker_offset != -1, "Sanity check");
      // Now that the null marker is there, the size of the nullable layout must be computed
      int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
      if (UseNullableNonAtomicValueFlattening) {
        _nullable_non_atomic_layout_size_in_bytes = new_raw_size;
        _null_marker_offset = null_marker_offset;
        // NOTE(review): this updates the null-free non-atomic alignment from inside
        // the nullable non-atomic branch — confirm this is intentional and not meant
        // to target a nullable-non-atomic alignment field instead.
        _null_free_non_atomic_layout_alignment = _payload_alignment;
      }
      if (UseNullableAtomicValueFlattening) {
        // For the nullable atomic layout, the size must be compatible with the platform capabilities
        int nullable_atomic_size = round_up_power_of_2(new_raw_size);
        if (nullable_atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
          _nullable_atomic_layout_size_in_bytes = nullable_atomic_size;
          _null_marker_offset = null_marker_offset;
        }
      }
      if (_null_marker_offset == -1) { // No nullable layout has been accepted
        // If the nullable layout is rejected, the NULL_MARKER block should be removed
        // from the layout, otherwise it will appear anyway if the layout is printed
        if (!_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
        }
      }
    }
    // If the inline class has an atomic or nullable atomic layout,
    // we want the raw layout to have the same alignment as those atomic layouts so access codes
    // could remain simple (single instruction without intermediate copy). This might require
    // to shift all fields in the raw layout, but this operation is possible only if the class
    // doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
    // field shift is needed but not possible, all atomic layouts are disabled and only reference
    // and loosely consistent are supported.
    int required_alignment = _payload_alignment;
    if (has_null_free_atomic_layout() && required_alignment < null_free_atomic_layout_size_in_bytes()) {
      required_alignment = null_free_atomic_layout_size_in_bytes();
    }
    if (has_nullable_atomic_layout() && required_alignment < nullable_atomic_layout_size_in_bytes()) {
      required_alignment = nullable_atomic_layout_size_in_bytes();
    }
    // Number of bytes the payload must move forward to reach the required alignment
    int shift = (required_alignment - (first_field->offset() % required_alignment)) % required_alignment;
    if (shift != 0) {
      if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
        assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possible dummy) field");
        _layout->shift_fields(shift);
        _payload_offset = _layout->first_field_block()->offset();
        assert(is_aligned(_payload_offset, required_alignment), "Fields should have been shifted to respect the required alignment");
        if (has_nullable_atomic_layout() || has_nullable_non_atomic_layout()) {
          assert(!_is_empty_inline_class, "Should not get here with empty values");
          // The null marker moved with the other fields, refresh its offset
          _null_marker_offset = _layout->find_null_marker()->offset();
        }
        _payload_alignment = required_alignment;
      } else {
        // Shift is impossible: give up on the atomic layouts
        _null_free_atomic_layout_size_in_bytes = -1;
        if (has_nullable_atomic_layout() && !has_nullable_non_atomic_layout() && !_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
          _null_marker_offset = -1;
        }
        _nullable_atomic_layout_size_in_bytes = -1;
      }
    } else {
      _payload_alignment = required_alignment;
    }

    // If the inline class has a nullable layout, the layout used in heap allocated standalone
    // instances must also be the nullable layout, in order to be able to set the null marker to
    // non-null before copying the payload to other containers.
    if (has_nullable_atomic_layout() && payload_layout_size_in_bytes() < nullable_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_atomic_layout_size_in_bytes();
    }
    if (has_nullable_non_atomic_layout() && payload_layout_size_in_bytes() < nullable_non_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_non_atomic_layout_size_in_bytes();
    }

    // if the inline class has a null-free atomic layout, the layout used in heap allocated standalone
    // instances must have at least equal to the atomic layout to allow safe read/write atomic
    // operation
    if (has_null_free_atomic_layout() && payload_layout_size_in_bytes() < null_free_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = null_free_atomic_layout_size_in_bytes();
    }
  }
  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  generate_acmp_maps();
  epilogue();
}
1344
1345 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps, InlineKlass* vklass, int offset) {
1346 int diff = offset - vklass->payload_offset();
1347 const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1348 const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1349 while (map < last_map) {
1350 nonstatic_oop_maps->add(map->offset() + diff, map->count());
1351 map++;
1352 }
1353 }
1354
1355 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1356 if (list == nullptr) {
1357 return;
1358 }
1359
1360 for (int i = 0; i < list->length(); i++) {
1361 LayoutRawBlock* f = list->at(i);
1362 if (f->block_kind() == LayoutRawBlock::FLAT) {
1363 InlineKlass* vk = f->inline_klass();
1364 assert(vk != nullptr, "Should have been initialized");
1365 if (vk->contains_oops()) {
1366 add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1367 }
1368 }
1369 }
1370 }
1371
1372 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1373 if (group->oop_fields() != nullptr) {
1374 for (int i = 0; i < group->oop_fields()->length(); i++) {
1375 LayoutRawBlock* b = group->oop_fields()->at(i);
1376 nonstatic_oop_maps->add(b->offset(), 1);
1377 }
1378 }
1379 register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1380 }
1381
1382 static int insert_segment(GrowableArray<AcmpMapSegment>* map, int offset, int size, int last_idx) {
1383 if (map->is_empty()) {
1384 return map->append(AcmpMapSegment(offset, size));
1385 }
1386 int start = map->adr_at(last_idx)->_offset > offset ? 0 : last_idx;
1387 bool inserted = false;
1388 for (int c = start; c < map->length(); c++) {
1389 if (offset == (map->adr_at(c)->_offset + map->adr_at(c)->_size)) {
1390 //contiguous to the last field, can be coalesced
1391 map->adr_at(c)->_size = map->adr_at(c)->_size + size;
1392 inserted = true;
1393 break; // break out of the for loop
1394 }
1395 if (offset < (map->adr_at(c)->_offset)) {
1396 map->insert_before(c, AcmpMapSegment(offset, size));
1397 last_idx = c;
1398 inserted = true;
1399 break; // break out of the for loop
1400 }
1401 }
1402 if (!inserted) {
1403 last_idx = map->append(AcmpMapSegment(offset, size));
1404 }
1405 return last_idx;
1406 }
1407
1408 static int insert_map_at_offset(GrowableArray<AcmpMapSegment>* nonoop_map, GrowableArray<int>* oop_map,
1409 const InstanceKlass* ik, int field_offset, int last_idx) {
1410 Array<int>* super_map = ik->acmp_maps_array();
1411 assert(super_map != nullptr, "super class must have an acmp map");
1412 int num_nonoop_field = super_map->at(0);
1413 for (int i = 0; i < num_nonoop_field; i++) {
1414 last_idx = insert_segment(nonoop_map,
1415 field_offset + super_map->at( i * 2 + 1),
1416 super_map->at( i * 2 + 2), last_idx);
1417 }
1418 int len = super_map->length();
1419 for (int i = num_nonoop_field * 2 + 1; i < len; i++) {
1420 oop_map->append(field_offset + super_map->at(i));
1421 }
1422 return last_idx;
1423 }
1424
1425 static void split_after(GrowableArray<AcmpMapSegment>* map, int idx, int head) {
1426 int offset = map->adr_at(idx)->_offset;
1427 int size = map->adr_at(idx)->_size;
1428 if (size <= head) return;
1429 map->adr_at(idx)->_offset = offset + head;
1430 map->adr_at(idx)->_size = size - head;
1431 map->insert_before(idx, AcmpMapSegment(offset, head));
1432
1433 }
1434
// Builds the acmp maps for a value class: a sorted, coalesced list of non-oop
// byte segments (_nonoop_acmp_map) and a list of oop offsets (_oop_acmp_map),
// covering inherited fields, local fields, and the contents of flat fields.
void FieldLayoutBuilder::generate_acmp_maps() {
  assert(_is_inline_type || _is_abstract_value, "Must be done only for value classes (abstract or not)");

  // create/initialize current class' maps
  _nonoop_acmp_map = new GrowableArray<AcmpMapSegment>();
  _oop_acmp_map = new GrowableArray<int>();
  // Empty inline classes have nothing to compare: leave both maps empty
  if (_is_empty_inline_class) return;
  // last_idx remembers the position of the last insertion in order to speed up the next insertion.
  // Local fields are processed in ascending offset order, so an insertion is very likely be performed
  // next to the previous insertion. However, in some cases local fields and inherited fields can be
  // interleaved, in which case the search of the insertion position cannot depend on the previous insertion.
  int last_idx = 0;
  if (_super_klass != nullptr && _super_klass != vmClasses::Object_klass()) { // Assumes j.l.Object cannot have fields
    // Seed the maps with the super class' acmp map (offsets are already absolute)
    last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, _super_klass, 0, last_idx);
  }

  // Processing local fields
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    switch(b->block_kind()) {
      case LayoutRawBlock::RESERVED:
      case LayoutRawBlock::EMPTY:
      case LayoutRawBlock::PADDING:
      case LayoutRawBlock::NULL_MARKER:
      case LayoutRawBlock::INHERITED: // inherited fields are handled during maps creation/initialization
        // skip
        break;

      case LayoutRawBlock::REGULAR:
      {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        // Signatures starting with 'L' or '[' denote references (objects/arrays)
        if (fi->signature(_constant_pool)->starts_with("L") || fi->signature(_constant_pool)->starts_with("[")) {
          _oop_acmp_map->append(b->offset());
        } else {
          // Non-oop case
          last_idx = insert_segment(_nonoop_acmp_map, b->offset(), b->size(), last_idx);
        }
        break;
      }
      case LayoutRawBlock::FLAT:
      {
        // Merge the flattened field's own acmp map, rebased to its position
        InlineKlass* vk = b->inline_klass();
        int field_offset = b->offset() - vk->payload_offset();
        last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, vk, field_offset, last_idx);
        if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
          // The 1-byte null marker participates in the comparison too
          int null_marker_offset = b->offset() + vk->null_marker_offset_in_payload();
          last_idx = insert_segment(_nonoop_acmp_map, null_marker_offset, 1, last_idx);
          // Important note: the implementation assumes that for nullable flat fields, if the
          // null marker is zero (field is null), then all the fields of the flat field are also
          // zeroed. So, nullable flat field are not encoded different than null-free flat fields,
          // all fields are included in the map, plus the null marker
          // If it happens that the assumption above is wrong, then nullable flat fields would
          // require a dedicated section in the acmp map, and be handled differently: null_marker
          // comparison first, and if null markers are identical and non-zero, then conditional
          // comparison of the other fields
        }
      }
      break;

    }
    b = b->next_block();
  }

  // split segments into well-aligned blocks
  int idx = 0;
  while (idx < _nonoop_acmp_map->length()) {
    int offset = _nonoop_acmp_map->adr_at(idx)->_offset;
    int size = _nonoop_acmp_map->adr_at(idx)->_size;
    // Peel off a head piece so the remainder starts on an 8-byte boundary;
    // split_after inserts the head before idx, so the loop revisits the tail.
    int mod = offset % 8;
    switch (mod) {
      case 0:
        break;
      case 4:
        split_after(_nonoop_acmp_map, idx, 4);
        break;
      case 2:
      case 6:
        split_after(_nonoop_acmp_map, idx, 2);
        break;
      case 1:
      case 3:
      case 5:
      case 7:
        split_after(_nonoop_acmp_map, idx, 1);
        break;
      default:
        ShouldNotReachHere();
    }
    idx++;
  }
}
1526
1527 void FieldLayoutBuilder::epilogue() {
1528 // Computing oopmaps
1529 OopMapBlocksBuilder* nonstatic_oop_maps =
1530 new OopMapBlocksBuilder(_nonstatic_oopmap_count);
1531 int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
1532 if (super_oop_map_count > 0) {
1533 nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
1534 _super_klass->nonstatic_oop_map_count());
1535 }
1536 register_embedded_oops(nonstatic_oop_maps, _root_group);
1537 if (!_contended_groups.is_empty()) {
1538 for (int i = 0; i < _contended_groups.length(); i++) {
1539 FieldGroup* cg = _contended_groups.at(i);
1540 register_embedded_oops(nonstatic_oop_maps, cg);
1541 }
1542 }
1543 nonstatic_oop_maps->compact();
1544
1545 int instance_end = align_up(_layout->last_block()->offset(), wordSize);
1546 int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
1547 int static_fields_size = (static_fields_end -
1548 InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
1549 int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);
1550
1551 // Pass back information needed for InstanceKlass creation
1552
1553 _info->oop_map_blocks = nonstatic_oop_maps;
1554 _info->_instance_size = align_object_size(instance_end / wordSize);
1555 _info->_static_field_size = static_fields_size;
1556 _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
1557 _info->_has_nonstatic_fields = _has_nonstatic_fields;
1558 _info->_has_inlined_fields = _has_inlined_fields;
1559 _info->_is_naturally_atomic = _is_naturally_atomic;
1560 if (_is_inline_type) {
1561 _info->_must_be_atomic = _must_be_atomic;
1562 _info->_payload_alignment = _payload_alignment;
1563 _info->_payload_offset = _payload_offset;
1564 _info->_payload_size_in_bytes = _payload_size_in_bytes;
1565 _info->_null_free_non_atomic_size_in_bytes = _null_free_non_atomic_layout_size_in_bytes;
1566 _info->_null_free_non_atomic_alignment = _null_free_non_atomic_layout_alignment;
1567 _info->_null_free_atomic_layout_size_in_bytes = _null_free_atomic_layout_size_in_bytes;
1568 _info->_nullable_atomic_layout_size_in_bytes = _nullable_atomic_layout_size_in_bytes;
1569 _info->_nullable_non_atomic_layout_size_in_bytes = _nullable_non_atomic_layout_size_in_bytes;
1570 _info->_null_marker_offset = _null_marker_offset;
1571 _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
1572 _info->_is_empty_inline_klass = _is_empty_inline_class;
1573 }
1574
1575 // Acmp maps are needed for both concrete and abstract value classes
1576 if (_is_inline_type || _is_abstract_value) {
1577 _info->_acmp_maps_offset = _static_layout->acmp_maps_offset();
1578 _info->_nonoop_acmp_map = _nonoop_acmp_map;
1579 _info->_oop_acmp_map = _oop_acmp_map;
1580 }
1581
1582 // This may be too restrictive, since if all the fields fit in 64
1583 // bits we could make the decision to align instances of this class
1584 // to 64-bit boundaries, and load and store them as single words.
1585 // And on machines which supported larger atomics we could similarly
1586 // allow larger values to be atomic, if properly aligned.
1587
1588 #ifdef ASSERT
1589 // Tests verifying integrity of field layouts are using the output of -XX:+PrintFieldLayout
1590 // which prints the details of LayoutRawBlocks used to compute the layout.
1591 // The code below checks that offsets in the _field_info meta-data match offsets
1592 // in the LayoutRawBlocks
1593 LayoutRawBlock* b = _layout->blocks();
1594 while(b != _layout->last_block()) {
1595 if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
1596 if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
1597 tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
1598 }
1599 assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1600 }
1601 b = b->next_block();
1602 }
1603 b = _static_layout->blocks();
1604 while(b != _static_layout->last_block()) {
1605 if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
1606 assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1607 }
1608 b = b->next_block();
1609 }
1610 #endif // ASSERT
1611
1612 static bool first_layout_print = true;
1613
1614 if (PrintFieldLayout || (PrintInlineLayout && (_has_inlineable_fields || _is_inline_type || _is_abstract_value))) {
1615 ResourceMark rm;
1616 stringStream st;
1617 if (first_layout_print) {
1618 st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
1619 st.print_cr("Heap oop size = %d", heapOopSize);
1620 first_layout_print = false;
1621 }
1622 if (_super_klass != nullptr) {
1623 st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
1624 _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
1625 } else {
1626 st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
1627 }
1628 st.print_cr("Instance fields:");
1629 const bool dummy_field_is_reused_as_null_marker = _is_empty_inline_class && _null_marker_offset != -1;
1630 _layout->print(&st, false, _super_klass, _inline_layout_info_array, dummy_field_is_reused_as_null_marker);
1631 st.print_cr("Static fields:");
1632 _static_layout->print(&st, true, nullptr, _inline_layout_info_array, false);
1633 st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
1634 if (_is_inline_type) {
1635 st.print_cr("First field offset = %d", _payload_offset);
1636 st.print_cr("%s layout: %d/%d", LayoutKindHelper::layout_kind_as_string(LayoutKind::BUFFERED),
1637 _payload_size_in_bytes, _payload_alignment);
1638 if (has_null_free_non_atomic_flat_layout()) {
1639 st.print_cr("%s layout: %d/%d",
1640 LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT),
1641 _null_free_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
1642 } else {
1643 st.print_cr("%s layout: -/-",
1644 LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT));
1645 }
1646 if (has_null_free_atomic_layout()) {
1647 st.print_cr("%s layout: %d/%d",
1648 LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT),
1649 _null_free_atomic_layout_size_in_bytes, _null_free_atomic_layout_size_in_bytes);
1650 } else {
1651 st.print_cr("%s layout: -/-",
1652 LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT));
1653 }
1654 if (has_nullable_atomic_layout()) {
1655 st.print_cr("%s layout: %d/%d",
1656 LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT),
1657 _nullable_atomic_layout_size_in_bytes, _nullable_atomic_layout_size_in_bytes);
1658 } else {
1659 st.print_cr("%s layout: -/-",
1660 LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT));
1661 }
1662 if (has_nullable_non_atomic_layout()) {
1663 st.print_cr("%s layout: %d/%d",
1664 LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT),
1665 _nullable_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
1666 } else {
1667 st.print_cr("%s layout: -/-",
1668 LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT));
1669 }
1670 if (_null_marker_offset != -1) {
1671 st.print_cr("Null marker offset = %d", _null_marker_offset);
1672 }
1673 st.print("Non-oop acmp map <offset,size>: ");
1674 for (int i = 0 ; i < _nonoop_acmp_map->length(); i++) {
1675 st.print("<%d,%d> ", _nonoop_acmp_map->at(i)._offset, _nonoop_acmp_map->at(i)._size);
1676 }
1677 st.print_cr("");
1678 st.print("oop acmp map: ");
1679 for (int i = 0 ; i < _oop_acmp_map->length(); i++) {
1680 st.print("%d ", _oop_acmp_map->at(i));
1681 }
1682 st.print_cr("");
1683 }
1684 st.print_cr("---");
1685 // Print output all together.
1686 tty->print_raw(st.as_string());
1687 }
1688 }
1689
1690 void FieldLayoutBuilder::build_layout() {
1691 if (_is_inline_type || _is_abstract_value) {
1692 compute_inline_class_layout();
1693 } else {
1694 compute_regular_layout();
1695 }
1696 }
|